Rework app into a class

This simplifies usage, goes from

	app['Foo']
to
	app.Foo

Also lets static analyzers detect invalid attributes, since the set of
valid attributes is now fixed by the class definition.

As a bonus, setting of the default field values is now done in the
constructor, not separately and manually.
This commit is contained in:
Daniel Martí 2015-11-28 13:09:47 +01:00
parent de12cfdbe1
commit ab614ab442
18 changed files with 515 additions and 433 deletions

View File

@ -320,11 +320,11 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
ftp.mkdir('metadata')
ftp.mkdir('srclibs')
ftp.chdir('metadata')
ftp.put(os.path.join('metadata', app['id'] + '.txt'),
app['id'] + '.txt')
ftp.put(os.path.join('metadata', app.id + '.txt'),
app.id + '.txt')
# And patches if there are any...
if os.path.exists(os.path.join('metadata', app['id'])):
send_dir(os.path.join('metadata', app['id']))
if os.path.exists(os.path.join('metadata', app.id)):
send_dir(os.path.join('metadata', app.id))
ftp.chdir(homedir)
# Create the build directory...
@ -375,7 +375,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
# (no need if it's a srclib)
if (not basesrclib) and os.path.exists(build_dir):
ftp.chdir(homedir + '/build')
fv = '.fdroidvcs-' + app['id']
fv = '.fdroidvcs-' + app.id
ftp.put(os.path.join('build', fv), fv)
send_dir(build_dir)
@ -389,7 +389,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
cmdline += ' --force --test'
if options.verbose:
cmdline += ' --verbose'
cmdline += " %s:%s" % (app['id'], thisbuild['vercode'])
cmdline += " %s:%s" % (app.id, thisbuild['vercode'])
chan.exec_command('bash -c ". ~/.bsenv && ' + cmdline + '"')
output = ''
while not chan.exit_status_ready():
@ -406,7 +406,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
if returncode != 0:
raise BuildException(
"Build.py failed on server for {0}:{1}".format(
app['id'], thisbuild['version']), output)
app.id, thisbuild['version']), output)
# Retrieve the built files...
logging.info("Retrieving build output...")
@ -423,7 +423,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
except:
raise BuildException(
"Build failed for %s:%s - missing output files".format(
app['id'], thisbuild['version']), output)
app.id, thisbuild['version']), output)
ftp.close()
finally:
@ -543,7 +543,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
if p is not None and p.returncode != 0:
raise BuildException("Error cleaning %s:%s" %
(app['id'], thisbuild['version']), p.output)
(app.id, thisbuild['version']), p.output)
for root, dirs, files in os.walk(build_dir):
@ -612,7 +612,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
if p.returncode != 0:
raise BuildException("Error running build command for %s:%s" %
(app['id'], thisbuild['version']), p.output)
(app.id, thisbuild['version']), p.output)
# Build native stuff if required...
if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']:
@ -640,7 +640,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
del manifest_text
p = FDroidPopen(cmd, cwd=os.path.join(root_dir, d))
if p.returncode != 0:
raise BuildException("NDK build failed for %s:%s" % (app['id'], thisbuild['version']), p.output)
raise BuildException("NDK build failed for %s:%s" % (app.id, thisbuild['version']), p.output)
p = None
# Build the release...
@ -702,7 +702,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
raise BuildException("Distribute build failed")
cid = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name')
if cid != app['id']:
if cid != app.id:
raise BuildException("Package ID mismatch between metadata and spec")
orientation = bconfig.get('app', 'orientation', 'landscape')
@ -712,7 +712,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
cmd = ['./build.py'
'--dir', root_dir,
'--name', bconfig.get('app', 'title'),
'--package', app['id'],
'--package', app.id,
'--version', bconfig.get('app', 'version'),
'--orientation', orientation
]
@ -759,8 +759,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
bindir = os.path.join(root_dir, 'bin')
if p is not None and p.returncode != 0:
raise BuildException("Build failed for %s:%s" % (app['id'], thisbuild['version']), p.output)
logging.info("Successfully built version " + thisbuild['version'] + ' of ' + app['id'])
raise BuildException("Build failed for %s:%s" % (app.id, thisbuild['version']), p.output)
logging.info("Successfully built version " + thisbuild['version'] + ' of ' + app.id)
if thisbuild['type'] == 'maven':
stdout_apk = '\n'.join([
@ -860,8 +860,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
raise BuildException("Could not find version information in build in output")
if not foundid:
raise BuildException("Could not find package ID in output")
if foundid != app['id']:
raise BuildException("Wrong package ID - build " + foundid + " but expected " + app['id'])
if foundid != app.id:
raise BuildException("Wrong package ID - build " + foundid + " but expected " + app.id)
# Some apps (e.g. Timeriffic) have had the bonkers idea of
# including the entire changelog in the version number. Remove
@ -941,7 +941,7 @@ def trybuild(app, thisbuild, build_dir, output_dir, also_check_dir, srclib_dir,
return False
logging.info("Building version %s (%s) of %s" % (
thisbuild['version'], thisbuild['vercode'], app['id']))
thisbuild['version'], thisbuild['vercode'], app.id))
if server:
# When using server mode, still keep a local cache of the repo, by
@ -1051,7 +1051,7 @@ def main():
apps = common.read_app_args(options.appid, allapps, True)
for appid, app in apps.items():
if (app['Disabled'] and not options.force) or not app['Repo Type'] or not app['builds']:
if (app.Disabled and not options.force) or not app.RepoType or not app.builds:
del apps[appid]
if not apps:
@ -1059,10 +1059,10 @@ def main():
if options.latest:
for app in apps.itervalues():
for build in reversed(app['builds']):
for build in reversed(app.builds):
if build['disable'] and not options.force:
continue
app['builds'] = [build]
app.builds = [build]
break
if options.wiki:
@ -1078,7 +1078,7 @@ def main():
first = True
for thisbuild in app['builds']:
for thisbuild in app.builds:
wikilog = None
try:
@ -1086,15 +1086,15 @@ def main():
# the source repo. We can reuse it on subsequent builds, if
# there are any.
if first:
if app['Repo Type'] == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo'])
if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app.Repo)
else:
build_dir = os.path.join('build', appid)
# Set up vcs interface and make sure we have the latest code...
logging.debug("Getting {0} vcs interface for {1}"
.format(app['Repo Type'], app['Repo']))
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)
.format(app.RepoType, app.Repo))
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
first = False
@ -1105,17 +1105,17 @@ def main():
options.server, options.force,
options.onserver, options.refresh):
if app.get('Binaries', None):
if app.Binaries is not None:
# This is an app where we build from source, and
# verify the apk contents against a developer's
# binary. We get that binary now, and save it
# alongside our built one in the 'unsigned'
# directory.
url = app['Binaries']
url = app.Binaries
url = url.replace('%v', thisbuild['version'])
url = url.replace('%c', str(thisbuild['vercode']))
logging.info("...retrieving " + url)
of = "{0}_{1}.apk.binary".format(app['id'], thisbuild['vercode'])
of = "{0}_{1}.apk.binary".format(app.id, thisbuild['vercode'])
of = os.path.join(output_dir, of)
net.download_file(url, local_filename=of)
@ -1159,7 +1159,7 @@ def main():
logging.error("Error while attempting to publish build log")
for app in build_succeeded:
logging.info("success: %s" % (app['id']))
logging.info("success: %s" % (app.id))
if not options.verbose:
for fa in failed_apps:

View File

@ -43,10 +43,10 @@ def check_http(app):
try:
if 'Update Check Data' not in app:
if not app.UpdateCheckData:
raise FDroidException('Missing Update Check Data')
urlcode, codeex, urlver, verex = app['Update Check Data'].split('|')
urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|')
vercode = "99999999"
if len(urlcode) > 0:
@ -76,7 +76,7 @@ def check_http(app):
return (version, vercode)
except FDroidException:
msg = "Could not complete http check for app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc())
msg = "Could not complete http check for app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg)
@ -90,28 +90,28 @@ def check_tags(app, pattern):
try:
if app['Repo Type'] == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo'])
repotype = common.getsrclibvcs(app['Repo'])
if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app.Repo)
repotype = common.getsrclibvcs(app.Repo)
else:
build_dir = os.path.join('build', app['id'])
repotype = app['Repo Type']
build_dir = os.path.join('build', app.id)
repotype = app.RepoType
if repotype not in ('git', 'git-svn', 'hg', 'bzr'):
return (None, 'Tags update mode only works for git, hg, bzr and git-svn repositories currently', None)
if repotype == 'git-svn' and ';' not in app['Repo']:
if repotype == 'git-svn' and ';' not in app.Repo:
return (None, 'Tags update mode used in git-svn, but the repo was not set up with tags', None)
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
vcs.gotorevision(None)
flavours = []
if len(app['builds']) > 0:
if app['builds'][-1]['gradle']:
flavours = app['builds'][-1]['gradle']
if len(app.builds) > 0:
if app.builds[-1]['gradle']:
flavours = app.builds[-1]['gradle']
hpak = None
htag = None
@ -161,10 +161,10 @@ def check_tags(app, pattern):
return (None, "Couldn't find any version information", None)
except VCSException as vcse:
msg = "VCS error while scanning app {0}: {1}".format(app['id'], vcse)
msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse)
return (None, msg, None)
except Exception:
msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc())
msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg, None)
@ -178,15 +178,15 @@ def check_repomanifest(app, branch=None):
try:
if app['Repo Type'] == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo'])
repotype = common.getsrclibvcs(app['Repo'])
if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app.Repo)
repotype = common.getsrclibvcs(app.Repo)
else:
build_dir = os.path.join('build', app['id'])
repotype = app['Repo Type']
build_dir = os.path.join('build', app.id)
repotype = app.RepoType
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
if repotype == 'git':
if branch:
@ -200,9 +200,9 @@ def check_repomanifest(app, branch=None):
vcs.gotorevision(None)
flavours = []
if len(app['builds']) > 0:
if app['builds'][-1]['gradle']:
flavours = app['builds'][-1]['gradle']
if len(app.builds) > 0:
if app.builds[-1]['gradle']:
flavours = app.builds[-1]['gradle']
hpak = None
hver = None
@ -229,38 +229,38 @@ def check_repomanifest(app, branch=None):
return (None, "Couldn't find any version information")
except VCSException as vcse:
msg = "VCS error while scanning app {0}: {1}".format(app['id'], vcse)
msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse)
return (None, msg)
except Exception:
msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc())
msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg)
def check_repotrunk(app, branch=None):
try:
if app['Repo Type'] == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo'])
repotype = common.getsrclibvcs(app['Repo'])
if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app.Repo)
repotype = common.getsrclibvcs(app.Repo)
else:
build_dir = os.path.join('build', app['id'])
repotype = app['Repo Type']
build_dir = os.path.join('build', app.id)
repotype = app.RepoType
if repotype not in ('git-svn', ):
return (None, 'RepoTrunk update mode only makes sense in git-svn repositories')
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
vcs.gotorevision(None)
ref = vcs.getref()
return (ref, ref)
except VCSException as vcse:
msg = "VCS error while scanning app {0}: {1}".format(app['id'], vcse)
msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse)
return (None, msg)
except Exception:
msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc())
msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg)
@ -269,7 +269,7 @@ def check_repotrunk(app, branch=None):
# the details of the current version.
def check_gplay(app):
time.sleep(15)
url = 'https://play.google.com/store/apps/details?id=' + app['id']
url = 'https://play.google.com/store/apps/details?id=' + app.id
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'}
req = urllib2.Request(url, None, headers)
try:
@ -308,14 +308,14 @@ def dirs_with_manifest(startdir):
# subdir relative to the build dir if found, None otherwise.
def possible_subdirs(app):
if app['Repo Type'] == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo'])
if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app.Repo)
else:
build_dir = os.path.join('build', app['id'])
build_dir = os.path.join('build', app.id)
flavours = []
if len(app['builds']) > 0:
build = app['builds'][-1]
if len(app.builds) > 0:
build = app.builds[-1]
if build['gradle']:
flavours = build['gradle']
@ -330,24 +330,24 @@ def possible_subdirs(app):
def fetch_autoname(app, tag):
if not app["Repo Type"] or app['Update Check Mode'] in ('None', 'Static'):
if not app.RepoType or app.UpdateCheckMode in ('None', 'Static'):
return None
if app['Repo Type'] == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo'])
if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app.Repo)
else:
build_dir = os.path.join('build', app['id'])
build_dir = os.path.join('build', app.id)
try:
vcs = common.getvcs(app["Repo Type"], app["Repo"], build_dir)
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
vcs.gotorevision(tag)
except VCSException:
return None
flavours = []
if len(app['builds']) > 0:
if app['builds'][-1]['gradle']:
flavours = app['builds'][-1]['gradle']
if len(app.builds) > 0:
if app.builds[-1]['gradle']:
flavours = app.builds[-1]['gradle']
logging.debug("...fetch auto name from " + build_dir)
new_name = None
@ -362,8 +362,8 @@ def fetch_autoname(app, tag):
commitmsg = None
if new_name:
logging.debug("...got autoname '" + new_name + "'")
if new_name != app['Auto Name']:
app['Auto Name'] = new_name
if new_name != app.AutoName:
app.AutoName = new_name
if not commitmsg:
commitmsg = "Set autoname of {0}".format(common.getappname(app))
else:
@ -382,7 +382,7 @@ def checkupdates_app(app, first=True):
msg = None
vercode = None
noverok = False
mode = app['Update Check Mode']
mode = app.UpdateCheckMode
if mode.startswith('Tags'):
pattern = mode[5:] if len(mode) > 4 else None
(version, vercode, tag) = check_tags(app, pattern)
@ -408,9 +408,9 @@ def checkupdates_app(app, first=True):
version = None
msg = 'Invalid update check method'
if version and vercode and app['Vercode Operation']:
if version and vercode and app.VercodeOperation:
oldvercode = str(int(vercode))
op = app['Vercode Operation'].replace("%c", oldvercode)
op = app.VercodeOperation.replace("%c", oldvercode)
vercode = str(eval(op))
logging.debug("Applied vercode operation: %s -> %s" % (oldvercode, vercode))
@ -422,16 +422,16 @@ def checkupdates_app(app, first=True):
updating = False
if version is None:
logmsg = "...{0} : {1}".format(app['id'], msg)
logmsg = "...{0} : {1}".format(app.id, msg)
if noverok:
logging.info(logmsg)
else:
logging.warn(logmsg)
elif vercode == app['Current Version Code']:
elif vercode == app.CurrentVersionCode:
logging.info("...up to date")
else:
app['Current Version'] = version
app['Current Version Code'] = str(int(vercode))
app.CurrentVersion = version
app.CurrentVersionCode = str(int(vercode))
updating = True
commitmsg = fetch_autoname(app, tag)
@ -443,7 +443,7 @@ def checkupdates_app(app, first=True):
commitmsg = 'Update CV of %s to %s' % (name, ver)
if options.auto:
mode = app['Auto Update Mode']
mode = app.AutoUpdateMode
if mode in ('None', 'Static'):
pass
elif mode.startswith('Version '):
@ -457,13 +457,13 @@ def checkupdates_app(app, first=True):
suffix = ''
gotcur = False
latest = None
for build in app['builds']:
if int(build['vercode']) >= int(app['Current Version Code']):
for build in app.builds:
if int(build['vercode']) >= int(app.CurrentVersionCode):
gotcur = True
if not latest or int(build['vercode']) > int(latest['vercode']):
latest = build
if int(latest['vercode']) > int(app['Current Version Code']):
if int(latest['vercode']) > int(app.CurrentVersionCode):
logging.info("Refusing to auto update, since the latest build is newer")
if not gotcur:
@ -471,21 +471,21 @@ def checkupdates_app(app, first=True):
if 'origlines' in newbuild:
del newbuild['origlines']
newbuild['disable'] = False
newbuild['vercode'] = app['Current Version Code']
newbuild['version'] = app['Current Version'] + suffix
newbuild['vercode'] = app.CurrentVersionCode
newbuild['version'] = app.CurrentVersion + suffix
logging.info("...auto-generating build for " + newbuild['version'])
commit = pattern.replace('%v', newbuild['version'])
commit = commit.replace('%c', newbuild['vercode'])
newbuild['commit'] = commit
app['builds'].append(newbuild)
app.builds.append(newbuild)
name = common.getappname(app)
ver = common.getcvname(app)
commitmsg = "Update %s to %s" % (name, ver)
else:
logging.warn('Invalid auto update mode "' + mode + '" on ' + app['id'])
logging.warn('Invalid auto update mode "' + mode + '" on ' + app.id)
if commitmsg:
metadatapath = os.path.join('metadata', app['id'] + '.txt')
metadatapath = os.path.join('metadata', app.id + '.txt')
with open(metadatapath, 'w') as f:
metadata.write_metadata('txt', f, app)
if options.commit:
@ -537,7 +537,7 @@ def main():
else:
logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason))
if version is not None:
stored = app['Current Version']
stored = app.CurrentVersion
if not stored:
logging.info("{0} has no Current Version but has version {1} on the Play Store"
.format(common.getappname(app), version))
@ -555,7 +555,7 @@ def main():
for appid, app in apps.iteritems():
if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'):
if options.autoonly and app.AutoUpdateMode in ('None', 'Static'):
logging.debug("Nothing to do for {0}...".format(appid))
continue

View File

@ -363,10 +363,10 @@ def read_app_args(args, allapps, allow_vercodes=False):
vc = vercodes[appid]
if not vc:
continue
app['builds'] = [b for b in app['builds'] if b['vercode'] in vc]
if len(app['builds']) != len(vercodes[appid]):
app.builds = [b for b in app.builds if b['vercode'] in vc]
if len(app.builds) != len(vercodes[appid]):
error = True
allvcs = [b['vercode'] for b in app['builds']]
allvcs = [b['vercode'] for b in app.builds]
for v in vercodes[appid]:
if v not in allvcs:
logging.critical("No such vercode %s for app %s" % (v, appid))
@ -419,23 +419,23 @@ def apknameinfo(filename):
def getapkname(app, build):
return "%s_%s.apk" % (app['id'], build['vercode'])
return "%s_%s.apk" % (app.id, build['vercode'])
def getsrcname(app, build):
return "%s_%s_src.tar.gz" % (app['id'], build['vercode'])
return "%s_%s_src.tar.gz" % (app.id, build['vercode'])
def getappname(app):
if app['Name']:
return app['Name']
if app['Auto Name']:
return app['Auto Name']
return app['id']
if app.Name:
return app.Name
if app.AutoName:
return app.AutoName
return app.id
def getcvname(app):
return '%s (%s)' % (app['Current Version'], app['Current Version Code'])
return '%s (%s)' % (app.CurrentVersion, app.CurrentVersionCode)
def getvcs(vcstype, remote, local):
@ -1026,7 +1026,7 @@ psearch_g = re.compile(r'.*(packageName|applicationId) *=* *["\']([^"]+)["\'].*'
def app_matches_packagename(app, package):
if not package:
return False
appid = app['Update Check Name'] or app['id']
appid = app.UpdateCheckName or app.id
if appid is None or appid == "Ignore":
return True
return appid == package
@ -1037,7 +1037,7 @@ def app_matches_packagename(app, package):
# All values returned are strings.
def parse_androidmanifests(paths, app):
ignoreversions = app['Update Check Ignore']
ignoreversions = app.UpdateCheckIgnore
ignoresearch = re.compile(ignoreversions).search if ignoreversions else None
if not paths:
@ -1277,7 +1277,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
if p.returncode != 0:
raise BuildException("Error running init command for %s:%s" %
(app['id'], build['version']), p.output)
(app.id, build['version']), p.output)
# Apply patches if any
if build['patch']:
@ -1285,7 +1285,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
for patch in build['patch']:
patch = patch.strip()
logging.info("Applying " + patch)
patch_path = os.path.join('metadata', app['id'], patch)
patch_path = os.path.join('metadata', app.id, patch)
p = FDroidPopen(['patch', '-p1', '-i', os.path.abspath(patch_path)], cwd=build_dir)
if p.returncode != 0:
raise BuildException("Failed to apply patch %s" % patch_path)
@ -1460,7 +1460,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
if p.returncode != 0:
raise BuildException("Error running prebuild command for %s:%s" %
(app['id'], build['version']), p.output)
(app.id, build['version']), p.output)
# Generate (or update) the ant build file, build.xml...
if build['update'] and build['update'] != ['no'] and build['type'] == 'ant':

View File

@ -79,20 +79,20 @@ def get_metadata_from_url(app, url):
# Figure out what kind of project it is...
projecttype = None
app['Web Site'] = url # by default, we might override it
app.WebSite = url # by default, we might override it
if url.startswith('git://'):
projecttype = 'git'
repo = url
repotype = 'git'
app['Source Code'] = ""
app['Web Site'] = ""
app.SourceCode = ""
app.WebSite = ""
elif url.startswith('https://github.com'):
projecttype = 'github'
repo = url
repotype = 'git'
app['Source Code'] = url
app['Issue Tracker'] = url + '/issues'
app['Web Site'] = ""
app.SourceCode = url
app.IssueTracker = url + '/issues'
app.WebSite = ""
elif url.startswith('https://gitlab.com/'):
projecttype = 'gitlab'
# git can be fussy with gitlab URLs unless they end in .git
@ -101,16 +101,16 @@ def get_metadata_from_url(app, url):
else:
repo = url + '.git'
repotype = 'git'
app['Source Code'] = url + '/tree/HEAD'
app['Issue Tracker'] = url + '/issues'
app.SourceCode = url + '/tree/HEAD'
app.IssueTracker = url + '/issues'
elif url.startswith('https://bitbucket.org/'):
if url.endswith('/'):
url = url[:-1]
projecttype = 'bitbucket'
app['Source Code'] = url + '/src'
app['Issue Tracker'] = url + '/issues'
app.SourceCode = url + '/src'
app.IssueTracker = url + '/issues'
# Figure out the repo type and adddress...
repotype, repo = getrepofrompage(app['Source Code'])
repotype, repo = getrepofrompage(app.SourceCode)
if not repotype:
logging.error("Unable to determine vcs type. " + repo)
sys.exit(1)
@ -139,8 +139,8 @@ def get_metadata_from_url(app, url):
vcs.gotorevision(options.rev)
root_dir = get_subdir(build_dir)
app['Repo Type'] = repotype
app['Repo'] = repo
app.RepoType = repotype
app.Repo = repo
return root_dir, build_dir
@ -175,8 +175,8 @@ def main():
apps = metadata.read_metadata()
package, app = metadata.get_default_app_info()
app['id'] = None
app['Update Check Mode'] = "Tags"
app.id = None
app.UpdateCheckMode = "Tags"
root_dir = None
build_dir = None
@ -185,7 +185,7 @@ def main():
root_dir, build_dir = get_metadata_from_url(app, options.url)
elif os.path.isdir('.git'):
if options.url:
app['Web Site'] = options.url
app.WebSite = options.url
root_dir = get_subdir(os.getcwd())
else:
logging.error("Specify project url.")
@ -238,7 +238,7 @@ def main():
continue
build[flag] = value
app['builds'].append(build)
app.builds.append(build)
# Keep the repo directory to save bandwidth...
if not os.path.exists('build'):
@ -246,7 +246,7 @@ def main():
if build_dir is not None:
shutil.move(build_dir, os.path.join('build', package))
with open('build/.fdroidvcs-' + package, 'w') as f:
f.write(app['Repo Type'] + ' ' + app['Repo'])
f.write(app.RepoType + ' ' + app.Repo)
metadatapath = os.path.join('metadata', package + '.txt')
with open(metadatapath, 'w') as f:

View File

@ -106,7 +106,7 @@ regex_checks = {
def check_regexes(app):
for f, checks in regex_checks.iteritems():
for m, r in checks:
v = app[f]
v = app.get_field(f)
if type(v) == str:
if v is None:
continue
@ -132,27 +132,27 @@ def get_lastbuild(builds):
def check_ucm_tags(app):
lastbuild = get_lastbuild(app['builds'])
lastbuild = get_lastbuild(app.builds)
if (lastbuild is not None
and lastbuild['commit']
and app['Update Check Mode'] == 'RepoManifest'
and app.UpdateCheckMode == 'RepoManifest'
and not lastbuild['commit'].startswith('unknown')
and lastbuild['vercode'] == app['Current Version Code']
and lastbuild['vercode'] == app.CurrentVersionCode
and not lastbuild['forcevercode']
and any(s in lastbuild['commit'] for s in '.,_-/')):
yield "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'" % (
lastbuild['commit'], app['Update Check Mode'])
lastbuild['commit'], app.UpdateCheckMode)
def check_char_limits(app):
limits = config['char_limits']
summ_chars = len(app['Summary'])
summ_chars = len(app.Summary)
if summ_chars > limits['Summary']:
yield "Summary of length %s is over the %i char limit" % (
summ_chars, limits['Summary'])
desc_charcount = sum(len(l) for l in app['Description'])
desc_charcount = sum(len(l) for l in app.Description)
if desc_charcount > limits['Description']:
yield "Description of length %s is over the %i char limit" % (
desc_charcount, limits['Description'])
@ -168,31 +168,28 @@ def check_old_links(app):
'gitorious.org',
'code.google.com',
]
if any(s in app['Repo'] for s in usual_sites):
if any(s in app.Repo for s in usual_sites):
for f in ['Web Site', 'Source Code', 'Issue Tracker', 'Changelog']:
if any(s in app[f] for s in old_sites):
yield "App is in '%s' but has a link to '%s'" % (app['Repo'], app[f])
v = app.get_field(f)
if any(s in v for s in old_sites):
yield "App is in '%s' but has a link to '%s'" % (app.Repo, v)
def check_useless_fields(app):
if app['Update Check Name'] == app['id']:
if app.UpdateCheckName == app.id:
yield "Update Check Name is set to the known app id - it can be removed"
filling_ucms = re.compile(r'^(Tags.*|RepoManifest.*)')
def check_checkupdates_ran(app):
if filling_ucms.match(app['Update Check Mode']):
if all(app[f] == metadata.app_defaults[f] for f in [
'Auto Name',
'Current Version',
'Current Version Code',
]):
if filling_ucms.match(app.UpdateCheckMode):
if not app.AutoName and not app.CurrentVersion and app.CurrentVersionCode == '0':
yield "UCM is set but it looks like checkupdates hasn't been run yet"
def check_empty_fields(app):
if not app['Categories']:
if not app.Categories:
yield "Categories are not set"
all_categories = Set([
@ -217,37 +214,37 @@ all_categories = Set([
def check_categories(app):
for categ in app['Categories']:
for categ in app.Categories:
if categ not in all_categories:
yield "Category '%s' is not valid" % categ
def check_duplicates(app):
if app['Name'] and app['Name'] == app['Auto Name']:
yield "Name '%s' is just the auto name - remove it" % app['Name']
if app.Name and app.Name == app.AutoName:
yield "Name '%s' is just the auto name - remove it" % app.Name
links_seen = set()
for f in ['Source Code', 'Web Site', 'Issue Tracker', 'Changelog']:
if not app[f]:
v = app.get_field(f)
if not v:
continue
v = app[f].lower()
v = v.lower()
if v in links_seen:
yield "Duplicate link in '%s': %s" % (f, v)
else:
links_seen.add(v)
name = app['Name'] or app['Auto Name']
if app['Summary'] and name:
if app['Summary'].lower() == name.lower():
yield "Summary '%s' is just the app's name" % app['Summary']
name = app.Name or app.AutoName
if app.Summary and name:
if app.Summary.lower() == name.lower():
yield "Summary '%s' is just the app's name" % app.Summary
desc = app['Description']
if app['Summary'] and desc and len(desc) == 1:
if app['Summary'].lower() == desc[0].lower():
yield "Description '%s' is just the app's summary" % app['Summary']
if app.Summary and app.Description and len(app.Description) == 1:
if app.Summary.lower() == app.Description[0].lower():
yield "Description '%s' is just the app's summary" % app.Summary
seenlines = set()
for l in app['Description']:
for l in app.Description:
if len(l) < 1:
continue
if l in seenlines:
@ -259,7 +256,7 @@ desc_url = re.compile(r'(^|[^[])\[([^ ]+)( |\]|$)')
def check_mediawiki_links(app):
wholedesc = ' '.join(app['Description'])
wholedesc = ' '.join(app.Description)
for um in desc_url.finditer(wholedesc):
url = um.group(1)
for m, r in http_checks:
@ -271,7 +268,7 @@ def check_bulleted_lists(app):
validchars = ['*', '#']
lchar = ''
lcount = 0
for l in app['Description']:
for l in app.Description:
if len(l) < 1:
lcount = 0
continue
@ -287,7 +284,7 @@ def check_bulleted_lists(app):
def check_builds(app):
for build in app['builds']:
for build in app.builds:
if build['disable']:
continue
for s in ['master', 'origin', 'HEAD', 'default', 'trunk']:
@ -318,7 +315,7 @@ def main():
apps = common.read_app_args(options.appid, allapps, False)
for appid, app in apps.iteritems():
if app['Disabled']:
if app.Disabled:
continue
warns = []

View File

@ -53,43 +53,128 @@ class MetaDataException(Exception):
def __str__(self):
return self.value
# In the order in which they are laid out on files
app_defaults = OrderedDict([
('Disabled', None),
('AntiFeatures', []),
('Provides', None),
('Categories', ['None']),
('License', 'Unknown'),
('Web Site', ''),
('Source Code', ''),
('Issue Tracker', ''),
('Changelog', ''),
('Donate', None),
('FlattrID', None),
('Bitcoin', None),
('Litecoin', None),
('Name', None),
('Auto Name', ''),
('Summary', ''),
('Description', []),
('Requires Root', False),
('Repo Type', ''),
('Repo', ''),
('Binaries', None),
('Maintainer Notes', []),
('Archive Policy', None),
('Auto Update Mode', 'None'),
('Update Check Mode', 'None'),
('Update Check Ignore', None),
('Vercode Operation', None),
('Update Check Name', None),
('Update Check Data', None),
('Current Version', ''),
('Current Version Code', '0'),
('No Source Since', ''),
# The set of valid metadata field names for an App, in no particular
# order.  App.get_field()/set_field()/append_field() check field names
# against this set before translating them to attribute names.
# Written as a set literal rather than set([...]) to avoid building a
# throwaway list (flake8-comprehensions C405).
app_fields = {
    'Disabled',
    'AntiFeatures',
    'Provides',
    'Categories',
    'License',
    'Web Site',
    'Source Code',
    'Issue Tracker',
    'Changelog',
    'Donate',
    'FlattrID',
    'Bitcoin',
    'Litecoin',
    'Name',
    'Auto Name',
    'Summary',
    'Description',
    'Requires Root',
    'Repo Type',
    'Repo',
    'Binaries',
    'Maintainer Notes',
    'Archive Policy',
    'Auto Update Mode',
    'Update Check Mode',
    'Update Check Ignore',
    'Vercode Operation',
    'Update Check Name',
    'Update Check Data',
    'Current Version',
    'Current Version Code',
    'No Source Since',

    'comments',  # For formats that don't do inline comments
    'builds',    # For formats that do builds as a list
}
class App():
    """Metadata for a single application.

    Each metadata field is stored as an attribute named after the field
    with the spaces removed, e.g. 'Auto Name' becomes AutoName.  All
    known fields get a default value in the constructor, so every
    attribute always exists on an instance.
    """

    def __init__(self):
        # Defaults for every known metadata field.  Mutable defaults
        # ([], {}) are created fresh per instance, so instances never
        # share state.
        self.Disabled = None
        self.AntiFeatures = []
        self.Provides = None
        self.Categories = ['None']
        self.License = 'Unknown'
        self.WebSite = ''
        self.SourceCode = ''
        self.IssueTracker = ''
        self.Changelog = ''
        self.Donate = None
        self.FlattrID = None
        self.Bitcoin = None
        self.Litecoin = None
        self.Name = None
        self.AutoName = ''
        self.Summary = ''
        self.Description = []
        self.RequiresRoot = False
        self.RepoType = ''
        self.Repo = ''
        self.Binaries = None
        self.MaintainerNotes = []
        self.ArchivePolicy = None
        self.AutoUpdateMode = 'None'
        self.UpdateCheckMode = 'None'
        self.UpdateCheckIgnore = None
        self.VercodeOperation = None
        self.UpdateCheckName = None
        self.UpdateCheckData = None
        self.CurrentVersion = ''
        self.CurrentVersionCode = '0'
        self.NoSourceSince = ''

        # Internal bookkeeping, not metadata fields proper.
        self.id = None
        self.metadatapath = None
        self.builds = []
        self.comments = {}
        self.added = None
        self.lastupdated = None

    @classmethod
    def field_to_attr(cls, f):
        """Translate a field name to its attribute name by dropping
        spaces, e.g. 'Auto Name' -> 'AutoName'."""
        return ''.join(f.split(' '))

    @classmethod
    def attr_to_field(cls, k):
        """Translate an attribute name back to its field name by
        re-inserting a space at each lower/upper-case boundary.  Names
        that are already valid fields (e.g. 'FlattrID', 'builds') pass
        through unchanged."""
        return k if k in app_fields else re.sub(r'([a-z])([A-Z])', r'\1 \2', k)

    def field_dict(self):
        """Return every attribute as a {field name: value} dict."""
        return dict((App.attr_to_field(attr), value)
                    for attr, value in self.__dict__.iteritems())

    def get_field(self, f):
        """Return the value of field f, raising on unknown field names."""
        if f not in app_fields:
            raise MetaDataException('Unrecognised app field: ' + f)
        return getattr(self, App.field_to_attr(f))

    def set_field(self, f, v):
        """Set field f to v, raising on unknown field names."""
        if f not in app_fields:
            raise MetaDataException('Unrecognised app field: ' + f)
        setattr(self, App.field_to_attr(f), v)

    def append_field(self, f, v):
        """Append v to the list held by field f, creating the list if
        the attribute is somehow missing; raises on unknown field names."""
        if f not in app_fields:
            raise MetaDataException('Unrecognised app field: ' + f)
        self.__dict__.setdefault(App.field_to_attr(f), []).append(v)

    def update_fields(self, d):
        """Set each field from the {field name: value} dict d."""
        for field, value in d.iteritems():
            self.set_field(field, value)
# In the order in which they are laid out on files
# Sorted by their action and their place in the build timeline
# These variables can have varying datatypes. For example, anything with
@ -238,13 +323,13 @@ valuetypes = {
# Check an app's metadata information for integrity errors
def check_metadata(info):
def check_metadata(app):
for v in valuetypes:
for field in v.fields:
v.check(info[field], info['id'])
for build in info['builds']:
v.check(app.get_field(field), app.id)
for build in app.builds:
for attr in v.attrs:
v.check(build[attr], info['id'])
v.check(build[attr], app.id)
# Formatter for descriptions. Create an instance, and call parseline() with
@ -519,11 +604,11 @@ def read_metadata(xref=True):
+ glob.glob(os.path.join('metadata', '*.json'))
+ glob.glob(os.path.join('metadata', '*.xml'))
+ glob.glob(os.path.join('metadata', '*.yaml'))):
appid, appinfo = parse_metadata(metadatapath)
if appid in apps:
raise MetaDataException("Found multiple metadata files for " + appid)
check_metadata(appinfo)
apps[appid] = appinfo
app = parse_metadata(metadatapath)
if app.id in apps:
raise MetaDataException("Found multiple metadata files for " + app.id)
check_metadata(app)
apps[app.id] = app
if xref:
# Parse all descriptions at load time, just to ensure cross-referencing
@ -535,7 +620,7 @@ def read_metadata(xref=True):
for appid, app in apps.iteritems():
try:
description_html(app['Description'], linkres)
description_html(app.Description, linkres)
except MetaDataException, e:
raise MetaDataException("Problem with description of " + appid +
" - " + str(e))
@ -555,7 +640,7 @@ def metafieldtype(name):
return 'buildv2'
if name == 'Use Built':
return 'obsolete'
if name not in app_defaults:
if name not in app_fields:
return 'unknown'
return 'string'
@ -603,44 +688,38 @@ def get_default_app_info(metadatapath=None):
else:
appid, _ = common.get_extension(os.path.basename(metadatapath))
thisinfo = {}
thisinfo.update(app_defaults)
thisinfo['metadatapath'] = metadatapath
app = App()
app.metadatapath = metadatapath
if appid is not None:
thisinfo['id'] = appid
app.id = appid
# General defaults...
thisinfo['builds'] = []
thisinfo['comments'] = dict()
return appid, thisinfo
return app
def sorted_builds(builds):
    """Return the given builds ordered by ascending numeric vercode."""
    def build_key(build):
        return int(build['vercode'])
    return sorted(builds, key=build_key)
def post_metadata_parse(thisinfo):
def post_metadata_parse(app):
supported_metadata = app_defaults.keys() + ['comments', 'builds', 'id', 'metadatapath']
for k, v in thisinfo.iteritems():
if k not in supported_metadata:
raise MetaDataException("Unrecognised metadata: {0}: {1}"
.format(k, v))
for f in app_fields:
v = app.get_field(f)
if type(v) in (float, int):
thisinfo[k] = str(v)
app.set_field(f, str(v))
# convert to the odd internal format
for k in ('Description', 'Maintainer Notes'):
if isinstance(thisinfo[k], basestring):
text = thisinfo[k].rstrip().lstrip()
thisinfo[k] = text.split('\n')
for f in ('Description', 'Maintainer Notes'):
v = app.get_field(f)
if isinstance(v, basestring):
text = v.rstrip().lstrip()
app.set_field(f, text.split('\n'))
supported_flags = (flag_defaults.keys()
+ ['vercode', 'version', 'versionCode', 'versionName'])
+ ['vercode', 'version', 'versionCode', 'versionName',
'type', 'ndk_path'])
esc_newlines = re.compile('\\\\( |\\n)')
for build in thisinfo['builds']:
for build in app.builds:
for k, v in build.items():
if k not in supported_flags:
raise MetaDataException("Unrecognised build flag: {0}={1}"
@ -683,13 +762,13 @@ def post_metadata_parse(thisinfo):
if isinstance(v, bool):
build[k] = 'yes' if v else 'no'
if not thisinfo['Description']:
thisinfo['Description'].append('No description available')
if not app.Description:
app.Description = ['No description available']
for build in thisinfo['builds']:
for build in app.builds:
fill_build_defaults(build)
thisinfo['builds'] = sorted_builds(thisinfo['builds'])
app.builds = sorted_builds(app.builds)
# Parse metadata for a single application.
@ -772,7 +851,7 @@ def parse_metadata(metadatapath):
def parse_json_metadata(metadatapath):
appid, thisinfo = get_default_app_info(metadatapath)
app = get_default_app_info(metadatapath)
# fdroid metadata is only strings and booleans, no floats or ints. And
# json returns unicode, and fdroidserver still uses plain python strings
@ -781,15 +860,15 @@ def parse_json_metadata(metadatapath):
object_hook=_decode_dict,
parse_int=lambda s: s,
parse_float=lambda s: s)
thisinfo.update(jsoninfo)
post_metadata_parse(thisinfo)
app.update_fields(jsoninfo)
post_metadata_parse(app)
return (appid, thisinfo)
return app
def parse_xml_metadata(metadatapath):
appid, thisinfo = get_default_app_info(metadatapath)
app = get_default_app_info(metadatapath)
tree = ElementTree.ElementTree(file=metadatapath)
root = tree.getroot()
@ -798,54 +877,46 @@ def parse_xml_metadata(metadatapath):
logging.critical(metadatapath + ' does not have root as <resources></resources>!')
sys.exit(1)
supported_metadata = app_defaults.keys()
for child in root:
if child.tag != 'builds':
# builds does not have name="" attrib
name = child.attrib['name']
if name not in supported_metadata:
raise MetaDataException("Unrecognised metadata: <"
+ child.tag + ' name="' + name + '">'
+ child.text
+ "</" + child.tag + '>')
if child.tag == 'string':
thisinfo[name] = child.text
app.set_field(name, child.text)
elif child.tag == 'string-array':
items = []
for item in child:
items.append(item.text)
thisinfo[name] = items
app.set_field(name, items)
elif child.tag == 'builds':
builds = []
for build in child:
builddict = dict()
for key in build:
builddict[key.tag] = key.text
builds.append(builddict)
thisinfo['builds'] = builds
app.builds.append(builddict)
# TODO handle this using <xsd:element type="xsd:boolean> in a schema
if not isinstance(thisinfo['Requires Root'], bool):
if thisinfo['Requires Root'] == 'true':
thisinfo['Requires Root'] = True
if not isinstance(app.RequiresRoot, bool):
if app.RequiresRoot == 'true':
app.RequiresRoot = True
else:
thisinfo['Requires Root'] = False
app.RequiresRoot = False
post_metadata_parse(thisinfo)
post_metadata_parse(app)
return (appid, thisinfo)
return app
def parse_yaml_metadata(metadatapath):
appid, thisinfo = get_default_app_info(metadatapath)
app = get_default_app_info(metadatapath)
yamlinfo = yaml.load(open(metadatapath, 'r'), Loader=YamlLoader)
thisinfo.update(yamlinfo)
post_metadata_parse(thisinfo)
app.update_fields(yamlinfo)
post_metadata_parse(app)
return (appid, thisinfo)
return app
def parse_txt_metadata(metadatapath):
@ -918,10 +989,10 @@ def parse_txt_metadata(metadatapath):
def add_comments(key):
if not curcomments:
return
thisinfo['comments'][key] = list(curcomments)
app.comments[key] = list(curcomments)
del curcomments[:]
appid, thisinfo = get_default_app_info(metadatapath)
app = get_default_app_info(metadatapath)
metafile = open(metadatapath, "r")
mode = 0
@ -942,7 +1013,7 @@ def parse_txt_metadata(metadatapath):
raise MetaDataException("No commit specified for {0} in {1}"
.format(curbuild['version'], linedesc))
thisinfo['builds'].append(curbuild)
app.builds.append(curbuild)
add_comments('build:' + curbuild['vercode'])
mode = 0
else:
@ -978,21 +1049,20 @@ def parse_txt_metadata(metadatapath):
add_comments(field)
if fieldtype == 'multiline':
mode = 1
thisinfo[field] = []
if value:
raise MetaDataException("Unexpected text on same line as " + field + " in " + linedesc)
elif fieldtype == 'string':
thisinfo[field] = value
app.set_field(field, value)
elif fieldtype == 'list':
thisinfo[field] = split_list_values(value)
app.set_field(field, split_list_values(value))
elif fieldtype == 'build':
if value.endswith("\\"):
mode = 2
buildlines = [value[:-1]]
else:
curbuild = parse_buildline([value])
thisinfo['builds'].append(curbuild)
add_comments('build:' + thisinfo['builds'][-1]['vercode'])
app.builds.append(curbuild)
add_comments('build:' + app.builds[-1]['vercode'])
elif fieldtype == 'buildv2':
curbuild = {}
vv = value.split(',')
@ -1015,15 +1085,15 @@ def parse_txt_metadata(metadatapath):
if line == '.':
mode = 0
else:
thisinfo[field].append(line)
app.append_field(field, line)
elif mode == 2: # Line continuation mode in Build Version
if line.endswith("\\"):
buildlines.append(line[:-1])
else:
buildlines.append(line)
curbuild = parse_buildline(buildlines)
thisinfo['builds'].append(curbuild)
add_comments('build:' + thisinfo['builds'][-1]['vercode'])
app.builds.append(curbuild)
add_comments('build:' + app.builds[-1]['vercode'])
mode = 0
add_comments(None)
@ -1035,34 +1105,34 @@ def parse_txt_metadata(metadatapath):
elif mode == 3:
raise MetaDataException("Unterminated build in " + metafile.name)
post_metadata_parse(thisinfo)
post_metadata_parse(app)
return (appid, thisinfo)
return app
def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
def w_comments(key):
if key not in app['comments']:
if key not in app.comments:
return
for line in app['comments'][key]:
for line in app.comments[key]:
w_comment(line)
def w_field_always(field, value=None):
if value is None:
value = app[field]
value = app.get_field(field)
w_comments(field)
w_field(field, value)
def w_field_nonempty(field, value=None):
if value is None:
value = app[field]
value = app.get_field(field)
w_comments(field)
if value:
w_field(field, value)
w_field_nonempty('Disabled')
if app['AntiFeatures']:
if app.AntiFeatures:
w_field_always('AntiFeatures')
w_field_nonempty('Provides')
w_field_always('Categories')
@ -1079,19 +1149,19 @@ def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
w_field_nonempty('Name')
w_field_nonempty('Auto Name')
w_field_always('Summary')
w_field_always('Description', description_txt(app['Description']))
w_field_always('Description', description_txt(app.Description))
mf.write('\n')
if app['Requires Root']:
if app.RequiresRoot:
w_field_always('Requires Root', 'yes')
mf.write('\n')
if app['Repo Type']:
if app.RepoType:
w_field_always('Repo Type')
w_field_always('Repo')
if app['Binaries']:
if app.Binaries:
w_field_always('Binaries')
mf.write('\n')
for build in sorted_builds(app['builds']):
for build in sorted_builds(app.builds):
if build['version'] == "Ignore":
continue
@ -1100,8 +1170,8 @@ def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
w_build(build)
mf.write('\n')
if app['Maintainer Notes']:
w_field_always('Maintainer Notes', app['Maintainer Notes'])
if app.MaintainerNotes:
w_field_always('Maintainer Notes', app.MaintainerNotes)
mf.write('\n')
w_field_nonempty('Archive Policy')
@ -1111,10 +1181,10 @@ def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
w_field_nonempty('Vercode Operation')
w_field_nonempty('Update Check Name')
w_field_nonempty('Update Check Data')
if app['Current Version']:
if app.CurrentVersion:
w_field_always('Current Version')
w_field_always('Current Version Code')
if app['No Source Since']:
if app.NoSourceSince:
mf.write('\n')
w_field_always('No Source Since')
w_comments(None)

View File

@ -117,7 +117,7 @@ def main():
sys.exit(1)
app = allapps[appid]
if app.get('Binaries', None):
if app.Binaries is not None:
# It's an app where we build from source, and verify the apk
# contents against a developer's binary, and then publish their

View File

@ -59,7 +59,7 @@ def main():
parser.error("Must give a valid format to --to")
for appid, app in apps.iteritems():
metadatapath = app['metadatapath']
metadatapath = app.metadatapath
base, ext = common.get_extension(metadatapath)
if not options.to and ext not in supported:
logging.info("Ignoring %s file at '%s'" % (ext, metadatapath))

View File

@ -274,10 +274,10 @@ def main():
for appid, app in apps.iteritems():
if app['Disabled']:
if app.Disabled:
logging.info("Skipping %s: disabled" % appid)
continue
if not app['builds']:
if not app.builds:
logging.info("Skipping %s: no builds specified" % appid)
continue
@ -285,15 +285,15 @@ def main():
try:
if app['Repo Type'] == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo'])
if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app.Repo)
else:
build_dir = os.path.join('build', appid)
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
for thisbuild in app['builds']:
for thisbuild in app.builds:
if thisbuild['disable']:
logging.info("...skipping version %s - %s" % (

View File

@ -217,9 +217,9 @@ def main():
logging.info("Processing repo types...")
repotypes = Counter()
for app in metaapps:
rtype = app['Repo Type'] or 'none'
rtype = app.RepoType or 'none'
if rtype == 'srclib':
rtype = common.getsrclibvcs(app['Repo'])
rtype = common.getsrclibvcs(app.Repo)
repotypes[rtype] += 1
with open(os.path.join(statsdir, 'repotypes.txt'), 'w') as f:
for rtype, count in repotypes.most_common():
@ -229,7 +229,7 @@ def main():
logging.info("Processing update check modes...")
ucms = Counter()
for app in metaapps:
checkmode = app['Update Check Mode']
checkmode = app.UpdateCheckMode
if checkmode.startswith('RepoManifest/'):
checkmode = checkmode[:12]
if checkmode.startswith('Tags '):
@ -242,7 +242,7 @@ def main():
logging.info("Processing categories...")
ctgs = Counter()
for app in metaapps:
for category in app['Categories']:
for category in app.Categories:
ctgs[category] += 1
with open(os.path.join(statsdir, 'categories.txt'), 'w') as f:
for category, count in ctgs.most_common():
@ -251,9 +251,9 @@ def main():
logging.info("Processing antifeatures...")
afs = Counter()
for app in metaapps:
if app['AntiFeatures'] is None:
if app.AntiFeatures is None:
continue
for antifeature in app['AntiFeatures']:
for antifeature in app.AntiFeatures:
afs[antifeature] += 1
with open(os.path.join(statsdir, 'antifeatures.txt'), 'w') as f:
for antifeature, count in afs.most_common():
@ -263,7 +263,7 @@ def main():
logging.info("Processing licenses...")
licenses = Counter()
for app in metaapps:
license = app['License']
license = app.License
licenses[license] += 1
with open(os.path.join(statsdir, 'licenses.txt'), 'w') as f:
for license, count in licenses.most_common():

View File

@ -94,43 +94,43 @@ def update_wiki(apps, sortedids, apks):
app = apps[appid]
wikidata = ''
if app['Disabled']:
wikidata += '{{Disabled|' + app['Disabled'] + '}}\n'
if 'AntiFeatures' in app:
for af in app['AntiFeatures']:
if app.Disabled:
wikidata += '{{Disabled|' + app.Disabled + '}}\n'
if app.AntiFeatures:
for af in app.AntiFeatures:
wikidata += '{{AntiFeature|' + af + '}}\n'
if app['Requires Root']:
if app.RequiresRoot:
requiresroot = 'Yes'
else:
requiresroot = 'No'
wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|changelog=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|license=%s|root=%s}}\n' % (
appid,
app['Name'],
time.strftime('%Y-%m-%d', app['added']) if 'added' in app else '',
time.strftime('%Y-%m-%d', app['lastupdated']) if 'lastupdated' in app else '',
app['Source Code'],
app['Issue Tracker'],
app['Web Site'],
app['Changelog'],
app['Donate'],
app['FlattrID'],
app['Bitcoin'],
app['Litecoin'],
app['License'],
app.Name,
time.strftime('%Y-%m-%d', app.added) if app.added else '',
time.strftime('%Y-%m-%d', app.lastupdated) if app.lastupdated else '',
app.SourceCode,
app.IssueTracker,
app.WebSite,
app.Changelog,
app.Donate,
app.FlattrID,
app.Bitcoin,
app.Litecoin,
app.License,
requiresroot)
if app['Provides']:
wikidata += "This app provides: %s" % ', '.join(app['Summary'].split(','))
if app.Provides:
wikidata += "This app provides: %s" % ', '.join(app.Summary.split(','))
wikidata += app['Summary']
wikidata += app.Summary
wikidata += " - [https://f-droid.org/repository/browse/?fdid=" + appid + " view in repository]\n\n"
wikidata += "=Description=\n"
wikidata += metadata.description_wiki(app['Description']) + "\n"
wikidata += metadata.description_wiki(app.Description) + "\n"
wikidata += "=Maintainer Notes=\n"
if 'Maintainer Notes' in app:
wikidata += metadata.description_wiki(app['Maintainer Notes']) + "\n"
if app.MaintainerNotes:
wikidata += metadata.description_wiki(app.MaintainerNotes) + "\n"
wikidata += "\nMetadata: [https://gitlab.com/fdroid/fdroiddata/blob/master/metadata/{0}.txt current] [https://gitlab.com/fdroid/fdroiddata/commits/master/metadata/{0}.txt history]\n".format(appid)
# Get a list of all packages for this application...
@ -140,13 +140,13 @@ def update_wiki(apps, sortedids, apks):
buildfails = False
for apk in apks:
if apk['id'] == appid:
if str(apk['versioncode']) == app['Current Version Code']:
if str(apk['versioncode']) == app.CurrentVersionCode:
gotcurrentver = True
apklist.append(apk)
# Include ones we can't build, as a special case...
for thisbuild in app['builds']:
for thisbuild in app.builds:
if thisbuild['disable']:
if thisbuild['vercode'] == app['Current Version Code']:
if thisbuild['vercode'] == app.CurrentVersionCode:
cantupdate = True
# TODO: Nasty: vercode is a string in the build, and an int elsewhere
apklist.append({'versioncode': int(thisbuild['vercode']),
@ -165,7 +165,7 @@ def update_wiki(apps, sortedids, apks):
'version': thisbuild['version'],
'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(appid, thisbuild['vercode']),
})
if app['Current Version Code'] == '0':
if app.CurrentVersionCode == '0':
cantupdate = True
# Sort with most recent first...
apklist = sorted(apklist, key=lambda apk: apk['versioncode'], reverse=True)
@ -177,13 +177,13 @@ def update_wiki(apps, sortedids, apks):
wikidata += "We don't have the current version of this app."
else:
wikidata += "We have the current version of this app."
wikidata += " (Check mode: " + app['Update Check Mode'] + ") "
wikidata += " (Auto-update mode: " + app['Auto Update Mode'] + ")\n\n"
if len(app['No Source Since']) > 0:
wikidata += "This application has partially or entirely been missing source code since version " + app['No Source Since'] + ".\n\n"
if len(app['Current Version']) > 0:
wikidata += "The current (recommended) version is " + app['Current Version']
wikidata += " (version code " + app['Current Version Code'] + ").\n\n"
wikidata += " (Check mode: " + app.UpdateCheckMode + ") "
wikidata += " (Auto-update mode: " + app.AutoUpdateMode + ")\n\n"
if len(app.NoSourceSince) > 0:
wikidata += "This application has partially or entirely been missing source code since version " + app.NoSourceSince + ".\n\n"
if len(app.CurrentVersion) > 0:
wikidata += "The current (recommended) version is " + app.CurrentVersion
wikidata += " (version code " + app.CurrentVersionCode + ").\n\n"
validapks = 0
for apk in apklist:
wikidata += "==" + apk['version'] + "==\n"
@ -200,21 +200,21 @@ def update_wiki(apps, sortedids, apks):
wikidata += "Version code: " + str(apk['versioncode']) + '\n'
wikidata += '\n[[Category:' + wikicat + ']]\n'
if len(app['No Source Since']) > 0:
if len(app.NoSourceSince) > 0:
wikidata += '\n[[Category:Apps missing source code]]\n'
if validapks == 0 and not app['Disabled']:
if validapks == 0 and not app.Disabled:
wikidata += '\n[[Category:Apps with no packages]]\n'
if cantupdate and not app['Disabled']:
if cantupdate and not app.Disabled:
wikidata += "\n[[Category:Apps we can't update]]\n"
if buildfails and not app['Disabled']:
if buildfails and not app.Disabled:
wikidata += "\n[[Category:Apps with failing builds]]\n"
elif not gotcurrentver and not cantupdate and not app['Disabled'] and app['Update Check Mode'] != "Static":
elif not gotcurrentver and not cantupdate and not app.Disabled and app.UpdateCheckMode != "Static":
wikidata += '\n[[Category:Apps to Update]]\n'
if app['Disabled']:
if app.Disabled:
wikidata += '\n[[Category:Apps that are disabled]]\n'
if app['Update Check Mode'] == 'None' and not app['Disabled']:
if app.UpdateCheckMode == 'None' and not app.Disabled:
wikidata += '\n[[Category:Apps with no update check]]\n'
for appcat in app['Categories']:
for appcat in app.Categories:
wikidata += '\n[[Category:{0}]]\n'.format(appcat)
# We can't have underscores in the page name, even if they're in
@ -231,7 +231,7 @@ def update_wiki(apps, sortedids, apks):
# Make a redirect from the name to the ID too, unless there's
# already an existing page with the name and it isn't a redirect.
noclobber = False
apppagename = app['Name'].replace('_', ' ')
apppagename = app.Name.replace('_', ' ')
apppagename = apppagename.replace('{', '')
apppagename = apppagename.replace('}', ' ')
apppagename = apppagename.replace(':', ' ')
@ -290,7 +290,7 @@ def delete_disabled_builds(apps, apkcache, repodirs):
:param repodirs: the repo directories to process
"""
for appid, app in apps.iteritems():
for build in app['builds']:
for build in app.builds:
if not build['disable']:
continue
apkfilename = appid + '_' + str(build['vercode']) + '.apk'
@ -805,7 +805,7 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
for appid in sortedids:
app = apps[appid]
if app['Disabled'] is not None:
if app.Disabled is not None:
continue
# Get a list of the apks for this app...
@ -818,57 +818,57 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
continue
apel = doc.createElement("application")
apel.setAttribute("id", app['id'])
apel.setAttribute("id", app.id)
root.appendChild(apel)
addElement('id', app['id'], doc, apel)
if 'added' in app:
addElement('added', time.strftime('%Y-%m-%d', app['added']), doc, apel)
if 'lastupdated' in app:
addElement('lastupdated', time.strftime('%Y-%m-%d', app['lastupdated']), doc, apel)
addElement('name', app['Name'], doc, apel)
addElement('summary', app['Summary'], doc, apel)
if app['icon']:
addElement('icon', app['icon'], doc, apel)
addElement('id', app.id, doc, apel)
if app.added:
addElement('added', time.strftime('%Y-%m-%d', app.added), doc, apel)
if app.lastupdated:
addElement('lastupdated', time.strftime('%Y-%m-%d', app.lastupdated), doc, apel)
addElement('name', app.Name, doc, apel)
addElement('summary', app.Summary, doc, apel)
if app.icon:
addElement('icon', app.icon, doc, apel)
def linkres(appid):
if appid in apps:
return ("fdroid.app:" + appid, apps[appid]['Name'])
return ("fdroid.app:" + appid, apps[appid].Name)
raise MetaDataException("Cannot resolve app id " + appid)
addElement('desc',
metadata.description_html(app['Description'], linkres),
metadata.description_html(app.Description, linkres),
doc, apel)
addElement('license', app['License'], doc, apel)
if 'Categories' in app and app['Categories']:
addElement('categories', ','.join(app["Categories"]), doc, apel)
addElement('license', app.License, doc, apel)
if app.Categories:
addElement('categories', ','.join(app.Categories), doc, apel)
# We put the first (primary) category in LAST, which will have
# the desired effect of making clients that only understand one
# category see that one.
addElement('category', app["Categories"][0], doc, apel)
addElement('web', app['Web Site'], doc, apel)
addElement('source', app['Source Code'], doc, apel)
addElement('tracker', app['Issue Tracker'], doc, apel)
addElementNonEmpty('changelog', app['Changelog'], doc, apel)
addElementNonEmpty('donate', app['Donate'], doc, apel)
addElementNonEmpty('bitcoin', app['Bitcoin'], doc, apel)
addElementNonEmpty('litecoin', app['Litecoin'], doc, apel)
addElementNonEmpty('flattr', app['FlattrID'], doc, apel)
addElement('category', app.Categories[0], doc, apel)
addElement('web', app.WebSite, doc, apel)
addElement('source', app.SourceCode, doc, apel)
addElement('tracker', app.IssueTracker, doc, apel)
addElementNonEmpty('changelog', app.Changelog, doc, apel)
addElementNonEmpty('donate', app.Donate, doc, apel)
addElementNonEmpty('bitcoin', app.Bitcoin, doc, apel)
addElementNonEmpty('litecoin', app.Litecoin, doc, apel)
addElementNonEmpty('flattr', app.FlattrID, doc, apel)
# These elements actually refer to the current version (i.e. which
# one is recommended. They are historically mis-named, and need
# changing, but stay like this for now to support existing clients.
addElement('marketversion', app['Current Version'], doc, apel)
addElement('marketvercode', app['Current Version Code'], doc, apel)
addElement('marketversion', app.CurrentVersion, doc, apel)
addElement('marketvercode', app.CurrentVersionCode, doc, apel)
if app['AntiFeatures']:
af = app['AntiFeatures']
if app.AntiFeatures:
af = app.AntiFeatures
if af:
addElementNonEmpty('antifeatures', ','.join(af), doc, apel)
if app['Provides']:
pv = app['Provides'].split(',')
if app.Provides:
pv = app.Provides.split(',')
addElementNonEmpty('provides', ','.join(pv), doc, apel)
if app['Requires Root']:
if app.RequiresRoot:
addElement('requirements', 'root', doc, apel)
# Sort the apk list into version order, just so the web site
@ -888,7 +888,7 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
# find the APK for the "Current Version"
if current_version_code < apk['versioncode']:
current_version_code = apk['versioncode']
if current_version_code < int(app['Current Version Code']):
if current_version_code < int(app.CurrentVersionCode):
current_version_file = apk['apkname']
apkel = doc.createElement("package")
@ -920,8 +920,8 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
if current_version_file is not None \
and config['make_current_version_link'] \
and repodir == 'repo': # only create these
sanitized_name = re.sub('''[ '"&%?+=/]''', '',
app[config['current_version_name_source']])
namefield = config['current_version_name_source']
sanitized_name = re.sub('''[ '"&%?+=/]''', '', app.get_field(namefield))
apklinkname = sanitized_name + '.apk'
current_version_path = os.path.join(repodir, current_version_file)
if os.path.islink(apklinkname):
@ -996,8 +996,8 @@ def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversi
for appid, app in apps.iteritems():
if app['Archive Policy']:
keepversions = int(app['Archive Policy'][:-9])
if app.ArchivePolicy:
keepversions = int(app.ArchivePolicy[:-9])
else:
keepversions = defaultkeepversions
@ -1163,7 +1163,7 @@ def main():
# Generate a list of categories...
categories = set()
for app in apps.itervalues():
categories.update(app['Categories'])
categories.update(app.Categories)
# Read known apks data (will be updated and written back when we've finished)
knownapks = common.KnownApks()
@ -1234,8 +1234,6 @@ def main():
# same time.
for appid, app in apps.iteritems():
bestver = 0
added = None
lastupdated = None
for apk in apks + archapks:
if apk['id'] == appid:
if apk['versioncode'] > bestver:
@ -1243,34 +1241,30 @@ def main():
bestapk = apk
if 'added' in apk:
if not added or apk['added'] < added:
added = apk['added']
if not lastupdated or apk['added'] > lastupdated:
lastupdated = apk['added']
if not app.added or apk['added'] < app.added:
app.added = apk['added']
if not app.lastupdated or apk['added'] > app.lastupdated:
app.lastupdated = apk['added']
if added:
app['added'] = added
else:
if not app.added:
logging.debug("Don't know when " + appid + " was added")
if lastupdated:
app['lastupdated'] = lastupdated
else:
if not app.lastupdated:
logging.debug("Don't know when " + appid + " was last updated")
if bestver == 0:
if app['Name'] is None:
app['Name'] = app['Auto Name'] or appid
app['icon'] = None
if app.Name is None:
app.Name = app.AutoName or appid
app.icon = None
logging.debug("Application " + appid + " has no packages")
else:
if app['Name'] is None:
app['Name'] = bestapk['name']
app['icon'] = bestapk['icon'] if 'icon' in bestapk else None
if app.Name is None:
app.Name = bestapk['name']
app.icon = bestapk['icon'] if 'icon' in bestapk else None
# Sort the app list by name, then the web site doesn't have to by default.
# (we had to wait until we'd scanned the apks to do this, because mostly the
# name comes from there!)
sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid]['Name'].upper())
sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid].Name.upper())
# APKs are placed into multiple repos based on the app package, providing
# per-app subscription feeds for nightly builds and things like it
@ -1309,10 +1303,10 @@ def main():
appid = line.rstrip()
data += appid + "\t"
app = apps[appid]
data += app['Name'] + "\t"
if app['icon'] is not None:
data += app['icon'] + "\t"
data += app['License'] + "\n"
data += app.Name + "\t"
if app.icon is not None:
data += app.icon + "\t"
data += app.License + "\n"
with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w') as f:
f.write(data)

View File

@ -116,8 +116,8 @@ class CommonTest(unittest.TestCase):
config['sdk_path'] = os.getenv('ANDROID_HOME')
config['build_tools'] = 'FAKE_BUILD_TOOLS_VERSION'
fdroidserver.common.config = config
app = dict()
app['id'] = 'org.fdroid.froid'
app = fdroidserver.metadata.App()
app.id = 'org.fdroid.froid'
build = dict(fdroidserver.metadata.flag_defaults)
build['commit'] = 'master'
build['forceversion'] = True

View File

@ -30,12 +30,12 @@ class ImportTest(unittest.TestCase):
fdroidserver.common.config['sdk_path'] = '/fake/path/to/android-sdk'
url = 'https://gitlab.com/fdroid/fdroidclient'
appid, app = fdroidserver.metadata.get_default_app_info()
app['Update Check Mode'] = "Tags"
app = fdroidserver.metadata.get_default_app_info()
app.UpdateCheckMode = "Tags"
root_dir, src_dir = import_proxy.get_metadata_from_url(app, url)
self.assertEquals(app['Repo Type'], 'git')
self.assertEquals(app['Web Site'], 'https://gitlab.com/fdroid/fdroidclient')
self.assertEquals(app['Repo'], 'https://gitlab.com/fdroid/fdroidclient.git')
self.assertEquals(app.RepoType, 'git')
self.assertEquals(app.WebSite, 'https://gitlab.com/fdroid/fdroidclient')
self.assertEquals(app.Repo, 'https://gitlab.com/fdroid/fdroidclient.git')
if __name__ == "__main__":

View File

@ -39,10 +39,15 @@ class MetadataTest(unittest.TestCase):
apps = fdroidserver.metadata.read_metadata(xref=True)
for appid in ('org.smssecure.smssecure', 'org.adaway', 'net.osmand.plus', 'org.videolan.vlc'):
with open(os.path.join('metadata', appid + '.pickle'), 'r') as f:
app = apps[appid]
savepath = os.path.join('metadata', appid + '.pickle')
self.assertTrue(appid in apps)
with open(savepath, 'r') as f:
frompickle = pickle.load(f)
self.assertTrue(appid in apps.keys())
self.assertEquals(apps[appid], frompickle)
frommeta = app.field_dict()
self.assertEquals(frommeta, frompickle)
# with open(savepath, 'wb') as f:
# pickle.dump(app, f)
if __name__ == "__main__":

View File

@ -491,12 +491,16 @@ S'metadata/net.osmand.plus.xml'
p178
sS'Disabled'
p179
NsS'Update Check Name'
NsS'added'
p180
NsS'Vercode Operation'
NsS'lastupdated'
p181
NsS'Current Version'
NsS'Update Check Name'
p182
S'1.9.5'
NsS'Vercode Operation'
p183
NsS'Current Version'
p184
S'1.9.5'
p185
s.

View File

@ -2290,4 +2290,8 @@ NsS'Update Check Name'
p483
NsS'Vercode Operation'
p484
Ns.
NsS'added'
p485
NsS'lastupdated'
p486
Ns.

View File

@ -772,4 +772,8 @@ NsS'Update Check Name'
p227
NsS'Vercode Operation'
p228
Ns.
NsS'added'
p229
NsS'lastupdated'
p230
Ns.

View File

@ -5610,14 +5610,18 @@ S'metadata/org.videolan.vlc.yaml'
p1519
sS'Disabled'
p1520
NsS'Update Check Name'
NsS'added'
p1521
NsS'Vercode Operation'
NsS'lastupdated'
p1522
S'%c + 5'
NsS'Update Check Name'
p1523
sS'Current Version'
NsS'Vercode Operation'
p1524
S'1.2.6'
S'%c + 5'
p1525
sS'Current Version'
p1526
S'1.2.6'
p1527
s.