# -*- coding: utf-8 -*-
#
# Copyright 2007-2011 Brecht Machiels
# Copyright 2009-2010 Chris Roberts
# Copyright 2009-2011 Scott McCreary
# Copyright 2009 Alexander Deynichenko
# Copyright 2009 HaikuBot (aka RISC)
# Copyright 2010-2011 Jack Laxson (Jrabbit)
# Copyright 2011 Ingo Weinhold
# Copyright 2013 Oliver Tappe
# Distributed under the terms of the MIT License.

# -- Modules ------------------------------------------------------------------

import codecs
import datetime
import json
import os
import shutil
from functools import cmp_to_key
from subprocess import STDOUT, CalledProcessError, check_call, check_output

from .BuildPlatform import buildPlatform
from .ConfigParser import ConfigParser
from .Configuration import Configuration
from .Options import getOption
from .RecipeTypes import Architectures, Status
from .ShellScriptlets import getScriptletPrerequirements
from .Utils import (ensureCommandIsAvailable, escapeForPackageInfo,
	haikuporterRepoUrl, haikuportsRepoUrl, info,
	naturalCompare, sysExit, touchFile, warn)

# -- The supported package types ----------------------------------------------

class PackageType(str):
	"""String constants naming the package types a port may produce."""

	DEBUG_INFO = 'debuginfo'
	DEVELOPMENT = 'devel'
	DOCUMENTATION = 'doc'
	GENERAL = 'general'
	SOURCE = 'source'

	@staticmethod
	def byName(name):
		"""Lookup the type by name, falling back to GENERAL for any
		   unrecognized name."""

		if name == PackageType.DEBUG_INFO:
			return PackageType.DEBUG_INFO
		elif name == PackageType.DEVELOPMENT:
			return PackageType.DEVELOPMENT
		elif name == PackageType.DOCUMENTATION:
			return PackageType.DOCUMENTATION
		elif name == PackageType.SOURCE:
			return PackageType.SOURCE
		else:
			return PackageType.GENERAL


# -- Base class for all packages ----------------------------------------------

class Package(object):
	def __init__(self, packageType, name, port, recipeKeys, policy,
			isRiggedSourcePackage=False):
		"""Set up naming, directories and the target architecture of a
		   package belonging to the given port.

		   packageType: one of the PackageType constants
		   name: fallback package name, used when the recipe doesn't
		         provide PACKAGE_NAME
		   port: the port this package belongs to
		   recipeKeys: dict of parsed recipe keys
		   policy: packaging policy checker applied to the built package
		   isRiggedSourcePackage: whether this is a source package that has
		         been patched for a specific target architecture
		"""

		self.type = packageType

		# PACKAGE_NAME/PACKAGE_VERSION are optional recipe keys; fall back
		# to the given name resp. the port's version when missing or empty.
		# (dict.get() avoids an AttributeError when the key is absent
		# altogether, which the previous 'in'-guarded assignment hit.)
		self.name = recipeKeys.get('PACKAGE_NAME') or name
		self.version = recipeKeys.get('PACKAGE_VERSION') or port.version

		self.revision = port.revision
		self.secondaryArchitecture = port.secondaryArchitecture

		self.workDir = port.workDir
		self.buildPackageDir = port.buildPackageDir
		self.packagingDir = port.packagingBaseDir + '/' + self.name
		self.hpkgDir = port.hpkgDir
		self.recipeKeys = recipeKeys
		self.policy = policy

		self.versionedName = self.name + '-' + self.version
		self.fullVersion = self.version + '-' + self.revision
		self.revisionedName = self.name + '-' + self.fullVersion

		self.packageInfoName = self.versionedName + '.PackageInfo'
		self.dependencyInfoName = self.versionedName + '.DependencyInfo'

		self.isRiggedSourcePackage = isRiggedSourcePackage

		if packageType == PackageType.SOURCE:
			if self.isRiggedSourcePackage:
				# let rigged source packages use the target architecture, as
				# (potentially) they have been patched specifically for that
				# target architecture
				self.architecture = port.targetArchitecture
			else:
				self.architecture = Architectures.SOURCE
		elif ((port.secondaryArchitecture is not None
				and port.secondaryArchitecture
					in self.recipeKeys['SECONDARY_ARCHITECTURES'])
			or port.targetArchitecture in self.recipeKeys['ARCHITECTURES']):
			# if this package can be built for the current target
			# architecture, we do so and create a package for the host
			# architecture (which is the same as the target architecture,
			# except for "_cross_" packages, which are built for the host on
			# which the build runs.)
			self.architecture = port.hostArchitecture
		elif Architectures.ANY in self.recipeKeys['ARCHITECTURES']:
			self.architecture = Architectures.ANY
		else:
			sysExit('package %s cannot be built for architecture %s'
				% (self.versionedName, port.targetArchitecture))

		self.fullVersionedName = self.versionedName + '-' + self.architecture
		self.fullRevisionedName = self.revisionedName + '-' + self.architecture
		self.hpkgName = self.fullRevisionedName + '.hpkg'

		# empty unless the port's shell variables define it (cross-builds)
		self.targetMachineTripleAsName \
			= port.shellVariables.get('targetMachineTripleAsName', '')

		self.buildPackage = None
		self.activeBuildPackage = None

	def getStatusOnArchitecture(self, architecture):
		"""Return the status of this package on the given architecture (which
		   must be a hardware architecture, i.e. not ANY or SOURCE)"""

		if architecture in self.recipeKeys['ARCHITECTURES']:
			return self.recipeKeys['ARCHITECTURES'][architecture]
		elif Architectures.ANY in self.recipeKeys['ARCHITECTURES']:
			return self.recipeKeys['ARCHITECTURES'][Architectures.ANY]
		elif Architectures.SOURCE in self.recipeKeys['ARCHITECTURES']:
			# source-only recipes are considered stable on any architecture
			return Status.STABLE
		return Status.UNSUPPORTED

	def isBuildableOnArchitecture(self, architecture):
		"""Returns whether or not this package is buildable on the given
		   architecture (STABLE, or UNTESTED when the configuration allows
		   untested ports)"""

		status = self.getStatusOnArchitecture(architecture)
		allowUntested = Configuration.shallAllowUntested()
		return (status == Status.STABLE
			or (status == Status.UNTESTED and allowUntested))

	def getStatusOnSecondaryArchitecture(self, architecture,
			secondaryArchitecture):
		"""Return the status of this package for the given architecture,
		   considering SECONDARY_ARCHITECTURES when a secondary architecture
		   is given."""

		# check the secondary architecture
		if secondaryArchitecture:
			secondaryStatus = Status.UNSUPPORTED
			secondaryArchitectures = self.recipeKeys['SECONDARY_ARCHITECTURES']
			if secondaryArchitecture in secondaryArchitectures:
				secondaryStatus = secondaryArchitectures[secondaryArchitecture]

			return secondaryStatus
		else:
			return self.getStatusOnArchitecture(architecture)

	def isBuildableOnSecondaryArchitecture(self, architecture,
			secondaryArchitecture, forceAllowUnstable=False):
		"""Returns whether this package is buildable for the given
		   architecture/secondary-architecture combination;
		   forceAllowUnstable overrides the status check entirely."""

		status = self.getStatusOnSecondaryArchitecture(architecture,
			secondaryArchitecture)
		allowUntested = Configuration.shallAllowUntested()
		return (status == Status.STABLE
			or (status == Status.UNTESTED and allowUntested)
			or forceAllowUnstable)

	def dependencyInfoFile(self, repositoryPath):
		"""Return the path of this package's DependencyInfo-file within the
		   given repository."""

		return os.path.join(repositoryPath, self.dependencyInfoName)

	def writeDependencyInfoIntoRepository(self, repositoryPath):
		"""Write a DependencyInfo-file for this package into the repository"""

		requires = ['BUILD_REQUIRES', 'BUILD_PREREQUIRES', 'REQUIRES',
			'TEST_REQUIRES']
		self.generateDependencyInfo(self.dependencyInfoFile(repositoryPath),
			requires)

	def removeDependencyInfoFromRepository(self, repositoryPath):
		"""Remove DependencyInfo-file from repository, if it's there"""

		dependencyInfoFile = self.dependencyInfoFile(repositoryPath)
		if os.path.exists(dependencyInfoFile):
			os.remove(dependencyInfoFile)

	def obsoletePackage(self, packagesPath):
		"""Moves the package-file into the 'obsolete' sub-directory"""

		obsoleteDir = packagesPath + '/.obsolete'
		packageFile = packagesPath + '/' + self.hpkgName
		if os.path.exists(packageFile):
			print('\tobsoleting package ' + self.hpkgName)
			obsoletePackage = obsoleteDir + '/' + self.hpkgName
			if not os.path.exists(obsoleteDir):
				os.mkdir(obsoleteDir)
			os.rename(packageFile, obsoletePackage)

	def generateDependencyInfoWithoutProvides(self, dependencyInfoPath,
			requiresToUse):
		"""Create a .DependencyInfo file that doesn't include any provides
		   except for the one matching the package name"""

		# NOTE(review): this previously passed 'architectures=' (plural),
		# which _generateDependencyInfo ignored (it reads 'architecture'),
		# so the intended ANY architecture was silently dropped.
		self._generateDependencyInfo(dependencyInfoPath, requiresToUse,
			fakeProvides=True, architecture=Architectures.ANY)
203 def generateDependencyInfo(self, dependencyInfoPath, requiresToUse): 204 """Create a .DependencyInfo file (used for dependency resolving)""" 205 206 self._generateDependencyInfo(dependencyInfoPath, requiresToUse) 207 208 def adjustToChroot(self): 209 """Adjust directories to chroot()-ed environment""" 210 211 # adjust all relevant directories 212 pathLengthToCut = len(self.workDir) 213 self.buildPackageDir = self.buildPackageDir[pathLengthToCut:] 214 self.packagingDir = self.packagingDir[pathLengthToCut:] 215 self.hpkgDir = self.hpkgDir[pathLengthToCut:] 216 self.workDir = '/' 217 218 def populatePackagingDir(self, port): 219 """Prefill packaging directory with stuff from the outside""" 220 221 licenseDir = port.baseDir + '/licenses' 222 if os.path.exists(licenseDir): 223 shutil.copytree(licenseDir, self.packagingDir + '/data/licenses') 224 225 def makeHpkg(self, requiresUpdater): 226 """Create a package suitable for distribution""" 227 228 if (requiresUpdater and self.type != PackageType.SOURCE): 229 requiresList = self.recipeKeys['REQUIRES'] 230 self.recipeKeys['UPDATED_REQUIRES'] \ 231 = requiresUpdater.updateRequiresList(requiresList) 232 requiresName = 'UPDATED_REQUIRES' 233 else: 234 requiresName = 'REQUIRES' 235 236 self._generatePackageInfo(self.packagingDir + '/.PackageInfo', 237 [requiresName], getOption('quiet'), False, True, self.architecture) 238 239 packageFile = self.hpkgDir + '/' + self.hpkgName 240 if os.path.exists(packageFile): 241 os.remove(packageFile) 242 243 # mimeset the files that shall go into the package 244 info('mimesetting files for package ' + self.hpkgName + ' ...') 245 dataDir = os.path.join(self.packagingDir, 'data') 246 mimeDBDir = os.path.join(dataDir, 'mime_db') 247 check_call([Configuration.getMimesetCommand(), '--all', '--mimedb', 248 'data/mime_db', '--mimedb', 249 buildPlatform.getSystemMimeDbDirectory(), '.'], 250 cwd=self.packagingDir) 251 252 # If data/mime_db is empty, remove it. 
253 if not os.listdir(mimeDBDir): 254 os.rmdir(mimeDBDir) 255 if not os.listdir(dataDir): 256 os.rmdir(dataDir) 257 else: 258 t = datetime.datetime(2001, 8, 18, 0, 0) 259 for superMimeType in os.listdir(mimeDBDir): 260 touchFile(mimeDBDir + "/" + superMimeType, t) 261 262 # Create the package 263 info('creating package ' + self.hpkgName + ' ...') 264 output = check_output([Configuration.getPackageCommand(), 'create', packageFile], 265 cwd=self.packagingDir).decode('utf-8') 266 info(output) 267 # policy check 268 self.policy.checkPackage(self, packageFile) 269 270 # Clean up after ourselves 271 shutil.rmtree(self.packagingDir) 272 273 def createBuildPackage(self): 274 """Create the build package""" 275 276 # create a package info for a build package 277 buildPackageInfo = (self.buildPackageDir + '/' + self.revisionedName 278 + '-build.PackageInfo') 279 self._generatePackageInfo(buildPackageInfo, 280 ['REQUIRES', 'BUILD_REQUIRES', 'BUILD_PREREQUIRES'], True, False, 281 False, self.architecture) 282 283 # create the build package 284 buildPackage = (self.buildPackageDir + '/' + self.revisionedName 285 + '-build.hpkg') 286 cmdlineArgs = [Configuration.getPackageCommand(), 'create', '-bi', 287 buildPackageInfo, '-I', self.packagingDir, buildPackage] 288 if getOption('quiet'): 289 cmdlineArgs.insert(2, '-q') 290 try: 291 output = check_output(cmdlineArgs, stderr=STDOUT).decode('utf-8') 292 except CalledProcessError as exception: 293 raise Exception('failure creating the build package: ' 294 + exception.output[:-1].decode('utf-8')) 295 info(output) 296 self.buildPackage = buildPackage 297 os.remove(buildPackageInfo) 298 299 def activateBuildPackage(self): 300 """Activate the build package""" 301 302 self.activeBuildPackage = buildPlatform.activateBuildPackage( 303 self.workDir, self.buildPackage, self.revisionedName) 304 305 def removeBuildPackage(self): 306 """Deactivate and remove the build package""" 307 308 if self.activeBuildPackage: 309 
buildPlatform.deactivateBuildPackage(self.workDir, 310 self.activeBuildPackage, self.revisionedName) 311 self.activeBuildPackage = None 312 if self.buildPackage and os.path.exists(self.buildPackage): 313 os.remove(self.buildPackage) 314 self.buildPackage = None 315 316 def _generatePackageInfo(self, packageInfoPath, requiresToUse, quiet, 317 fakeEmptyProvides, withActivationActions, architecture): 318 """Create a .PackageInfo file for inclusion in a package or for 319 dependency resolving""" 320 321 if not architecture: 322 architecture = self.architecture 323 324 # If it exists, remove the file first. Otherwise we might write to the 325 # wrong file, if it is a symlink. 326 if os.path.exists(packageInfoPath): 327 os.remove(packageInfoPath) 328 329 with codecs.open(packageInfoPath, 'w', 'utf-8') as infoFile: 330 if fakeEmptyProvides: 331 infoFile.write('name\t\t\tfaked_' + self.name + '\n') 332 else: 333 infoFile.write('name\t\t\t' + self.name + '\n') 334 infoFile.write('version\t\t\t' + self.fullVersion + '\n') 335 infoFile.write('architecture\t\t' + architecture + '\n') 336 infoFile.write('summary\t\t\t"' 337 + escapeForPackageInfo(self.recipeKeys['SUMMARY']) 338 + '"\n' 339 ) 340 341 infoFile.write('description\t\t"') 342 infoFile.write( 343 escapeForPackageInfo('\n'.join(self.recipeKeys['DESCRIPTION']))) 344 infoFile.write('"\n') 345 346 infoFile.write('packager\t\t"' + Configuration.getPackager() + '"\n') 347 infoFile.write('vendor\t\t\t"' + Configuration.getVendor() + '"\n') 348 349 # These keys aren't mandatory so we need to check if they exist 350 if self.recipeKeys['LICENSE']: 351 infoFile.write('licenses {\n') 352 for aLicense in self.recipeKeys['LICENSE']: 353 infoFile.write('\t"' + aLicense + '"\n') 354 infoFile.write('}\n') 355 356 if self.recipeKeys['COPYRIGHT']: 357 infoFile.write('copyrights {\n') 358 for aCopyright in self.recipeKeys['COPYRIGHT']: 359 infoFile.write('\t"' + aCopyright + '"\n') 360 infoFile.write('}\n') 361 362 requires = [] 363 for 
requiresKey in requiresToUse: 364 if requiresKey == 'SCRIPTLET_PREREQUIRES': 365 # Add prerequirements for executing chroot scriptlets. 366 # For cross-built packages, pass in the target machine name, 367 # but take care to not do that for packages that implement 368 # the cross-building themselves (i.e. binutils and gcc), 369 # as those are running in the context of the build machine. 370 targetMachineTripleAsName = self.targetMachineTripleAsName 371 if (Configuration.isCrossBuildRepository() 372 and '_cross_' in self.name): 373 targetMachineTripleAsName = '' 374 requiresForKey = getScriptletPrerequirements( 375 targetMachineTripleAsName) 376 else: 377 requiresForKey = self.recipeKeys[requiresKey] 378 for require in requiresForKey: 379 if require not in requires: 380 requires.append(require) 381 382 if fakeEmptyProvides: 383 infoFile.write('provides {\n\tfaked_' + self.name + ' = ' 384 + self.version + '\n}\n') 385 else: 386 self._writePackageInfoListByKey(infoFile, 'PROVIDES', 387 'provides') 388 self._writePackageInfoList(infoFile, requires, 'requires') 389 self._writePackageInfoListByKey(infoFile, 'SUPPLEMENTS', 390 'supplements') 391 self._writePackageInfoListByKey(infoFile, 'CONFLICTS', 'conflicts') 392 self._writePackageInfoListByKey(infoFile, 'FRESHENS', 'freshens') 393 self._writePackageInfoListByKey(infoFile, 'REPLACES', 'replaces') 394 395 self._writePackageInfoListQuotePaths(infoFile, 396 self.recipeKeys['HOMEPAGE'], 'urls') 397 398 if withActivationActions: 399 self._writePackageInfoListQuotePaths(infoFile, 400 self.recipeKeys['GLOBAL_WRITABLE_FILES'], 401 'global-writable-files') 402 self._writePackageInfoListQuotePaths(infoFile, 403 self.recipeKeys['USER_SETTINGS_FILES'], 404 'user-settings-files') 405 self._writePackageInfoListByKey(infoFile, 'PACKAGE_USERS', 406 'users') 407 self._writePackageInfoListByKey(infoFile, 'PACKAGE_GROUPS', 408 'groups') 409 self._writePackageInfoListQuotePaths(infoFile, 410 self.recipeKeys['POST_INSTALL_SCRIPTS'], 411 
'post-install-scripts') 412 413 # Generate SourceURL lines for all ports, regardless of license. 414 # Re-use the download URLs, as specified in the recipe. 415 infoFile.write('source-urls {\n') 416 for index in sorted(list(self.recipeKeys['SOURCE_URI'].keys()), 417 key=cmp_to_key(naturalCompare)): 418 uricount = 1 419 for uri in self.recipeKeys['SOURCE_URI'][index]: 420 if 'file://' in uri: 421 # skip local URIs 422 continue 423 424 if uricount < 2: 425 infoFile.write('# Download\n') 426 infoFile.write('\t"' + uri + '"\n') 427 else: 428 infoFile.write('# Location ' + str(uricount) + '\n') 429 infoFile.write('\t"' + uri + '"\n') 430 uricount += 1 431 432 # TODO: fix or drop the following URLs 433 # Point directly to the file in subversion. 434 #recipeurl_base = ('http://ports.haiku-files.org/' 435 # + 'svn/haikuports/trunk/' + self.category + '/' 436 # + self.name) 437 # 438 #recipeurl = (recipeurl_base + '/' + self.name+ '-' + self.version 439 # + '.recipe') 440 441 #infoFile.write('\t"Port-file <' + recipeurl + '>"\n') 442 #patchFilePath = (self.patchesDir + '/' + self.name + '-' 443 # + self.version + '.patch') 444 #if os.path.exists(patchFilePath): 445 # patchurl = (recipeurl_base + '/patches/' + self.name + '-' 446 # + self.version + '.patch') 447 # infoFile.write('\t"Patches <' + patchurl + '>"\n') 448 449 infoFile.write('}\n') 450 451 if not quiet: 452 with codecs.open(packageInfoPath, 'r', 'utf-8') as infoFile: 453 info(infoFile.read()) 454 455 def _writePackageInfoListByKey(self, infoFile, key, keyword): 456 self._writePackageInfoList(infoFile, self.recipeKeys[key], keyword) 457 458 def _writePackageInfoList(self, infoFile, theList, keyword): 459 if theList: 460 infoFile.write(keyword + ' {\n') 461 for item in theList: 462 infoFile.write('\t' + item + '\n') 463 infoFile.write('}\n') 464 465 def _writePackageInfoListQuotePaths(self, infoFile, theList, keyword): 466 if theList: 467 infoFile.write(keyword + ' {\n') 468 for item in theList: 469 # quote 
unquoted components that look like paths 470 components = ConfigParser.splitItem(item) 471 item = '' 472 for component in components: 473 if component[0] != '"' and component.find('/') >= 0: 474 component = '"' + component + '"' 475 if item: 476 item += ' ' 477 item += component 478 infoFile.write('\t' + item + '\n') 479 infoFile.write('}\n') 480 481 def _generateDependencyInfo(self, dependencyInfoPath, requiresToUse, 482 **kwargs): 483 """Create a .DependencyInfo file (used for dependency resolving)""" 484 485 architecture = kwargs.get('architecture', self.architecture) 486 fakeProvides = kwargs.get('fakeProvides', False) 487 488 # If it exists, remove the file first. Otherwise we might write to the 489 # wrong file, if it is a symlink. 490 if os.path.exists(dependencyInfoPath): 491 os.remove(dependencyInfoPath) 492 493 with codecs.open(dependencyInfoPath, 'w', 'utf-8') as infoFile: 494 dependencyInfo = { 495 'name': self.name, 496 'version': self.version, 497 'architecture': architecture, 498 'provides': self.recipeKeys['PROVIDES'], 499 'requires': [], 500 'buildRequires': [], 501 'buildPrerequires': [], 502 'testRequires': [] 503 } 504 505 if fakeProvides: 506 dependencyInfo['provides'] = [] 507 508 requiresKeyMap = { 509 'BUILD_REQUIRES': 'buildRequires', 510 'BUILD_PREREQUIRES': 'buildPrerequires', 511 'TEST_REQUIRES': 'testRequires', 512 'REQUIRES': 'requires', 513 'SCRIPTLET_PREREQUIRES': 'buildPrerequires', 514 } 515 for requiresKey in requiresToUse: 516 if requiresKey == 'SCRIPTLET_PREREQUIRES': 517 # Add prerequirements for executing chroot scriptlets. 518 # For cross-built packages, pass in the target machine name, 519 # but take care to not do that for packages that implement 520 # the cross-building themselves (i.e. binutils and gcc), 521 # as those are running in the context of the build machine. 
522 targetMachineTripleAsName = self.targetMachineTripleAsName 523 if (Configuration.isCrossBuildRepository() 524 and '_cross_' in self.name): 525 targetMachineTripleAsName = '' 526 requiresForKey = getScriptletPrerequirements( 527 targetMachineTripleAsName) 528 else: 529 requiresForKey = self.recipeKeys[requiresKey] 530 531 requiresList = dependencyInfo[requiresKeyMap[requiresKey]] 532 for require in requiresForKey: 533 require = require.partition('#')[0].strip() 534 if require and require not in requiresList: 535 requiresList.append(require) 536 537 json.dump(dependencyInfo, infoFile, sort_keys=True, 538 indent=4, separators=(',', ' : ')) 539 infoFile.write('\n') 540 541# -- A source package --------------------------------------------------------- 542 543class SourcePackage(Package): 544 def populatePackagingDir(self, port): 545 """Prefill packaging directory with stuff from the outside""" 546 547 if self.isRiggedSourcePackage: 548 info("Populating rigged source package ...") 549 else: 550 info("Populating source package ...") 551 552 super(SourcePackage, self).populatePackagingDir(port) 553 554 targetBaseDir = (self.packagingDir + '/develop/sources/' 555 + port.revisionedName) 556 for source in port.sources: 557 targetDir = (targetBaseDir + '/' 558 + os.path.basename(source.sourceBaseDir)) 559 # export sources and additional files (if any) 560 source.exportSources(targetDir, self.isRiggedSourcePackage) 561 source.populateAdditionalFiles(targetBaseDir) 562 563 # copy patches, if there are any 564 if port.patchesDir and os.path.exists(port.patchesDir): 565 patchesTargetDir = targetBaseDir + '/patches' 566 for patchFileName in os.listdir(port.patchesDir): 567 if not (patchFileName.startswith(port.versionedName + '.') 568 or patchFileName.startswith(port.versionedName + '-')): 569 continue 570 if not os.path.exists(patchesTargetDir): 571 os.mkdir(patchesTargetDir) 572 patchFilePath = port.patchesDir + '/' + patchFileName 573 shutil.copy(patchFilePath, 
patchesTargetDir) 574 575 # copy licenses, if there are any 576 if port.licensesDir and os.path.exists(port.licensesDir): 577 licensesTargetDir = targetBaseDir + '/licenses' 578 if not os.path.exists(licensesTargetDir): 579 os.mkdir(licensesTargetDir) 580 for licenseFileName in os.listdir(port.licensesDir): 581 licenseFilePath = port.licensesDir + '/' + licenseFileName 582 shutil.copy(licenseFilePath, licensesTargetDir) 583 584 # add ReadMe with references to the used repositories 585 haikuportsRev = '<unknown>' 586 if os.path.exists(Configuration.getTreePath() + '/.git'): 587 try: 588 ensureCommandIsAvailable('git') 589 haikuportsRev \ 590 = check_output(['git', 'rev-parse', '--short', 'HEAD'], 591 cwd=Configuration.getTreePath(), stderr=STDOUT).decode('utf-8') 592 except: 593 warn('unable to determine revision of haikuports tree') 594 with open(targetBaseDir + '/ReadMe', 'w') as readmeFile: 595 readmeFile.write(( 596 'These are the sources (and optionally patches) that were\n' 597 'used to build the "%s"-package(s).\n\n' 598 'In order to build them, please checkout the haikuports tree\n' 599 'and use the haikuporter tool to run the build for you.\n\n' 600 'haikuports-URL: %s (revision %s)\n' 601 'haikuporter-URL: %s\n') 602 % (port.name, haikuportsRepoUrl, haikuportsRev.strip(), 603 haikuporterRepoUrl)) 604 605 # copy recipe file 606 shutil.copy(port.recipeFilePath, targetBaseDir) 607 608# -- package factory function ------------------------------------------------- 609 610def packageFactory(packageType, name, port, recipeKeys, policy): 611 """Creates a package matching the given type""" 612 613 if packageType == PackageType.SOURCE: 614 return SourcePackage(packageType, name, port, recipeKeys, policy) 615 else: 616 return Package(packageType, name, port, recipeKeys, policy) 617 618# -- source package factory function ------------------------------------------ 619 620def sourcePackageFactory(name, port, recipeKeys, policy, rigged): 621 """Creates a source 
package""" 622 623 return SourcePackage(PackageType.SOURCE, name, port, recipeKeys, policy, 624 rigged) 625