|
|
|
|
|
|
import argparse
import json
import os
import subprocess
import sys
import textwrap
from base64 import b64encode
from http.client import HTTPSConnection
|
|
|
def create_or_increment(map, key):
    """Increment the integer counter stored under *key*, creating it at 1.

    Mutates *map* in place and returns None.

    NOTE(review): parameter name `map` shadows the builtin (kept for
    backward compatibility with keyword callers); `collections.Counter`
    would make this helper unnecessary. The function appears unused in
    the visible part of the file.
    """
    # Single lookup with a default instead of the LBYL membership test.
    map[key] = map.get(key, 0) + 1
|
|
|
|
|
|
|
|
|
|
|
# Command-line interface.
# NOTE(review): the merged source lost the parser construction and
# parse_args() call, and declared the folder both as a positional and as
# a required -f option; the option form is kept (later code reads
# args.folder either way).
parser = argparse.ArgumentParser(
    description="Prepare and publish sub-packages to the bintray npm registry.")
parser.add_argument("-f", "--folder", help="the folder containing master-package.json and the sub-package folders",
                    required=True)
parser.add_argument("-u", "--username",
                    help="bintray username, as in <username>@unity (i.e. don't write the `@unity` part)",
                    required=True)
parser.add_argument("-k", "--key", help="bintray API key", required=True)
parser.add_argument("-p", "--publish", action="store_true",
                    help="DANGER: actually do the publish and then clean-up. if not set, only preparation will be "
                         "done for inspection purposes")
parser.add_argument("-d", "--dirty", action="store_true",
                    help="don't clean-up files")
parser.add_argument("-v", "--verbose", action="store_true", help="tell me what's going on here")
parser.add_argument("-s", "--silent", action="store_true", help="don't tell me what's going on here")
args = parser.parse_args()

# Globals read by info_print() and the rest of the script.
verbose = args.verbose
silent = args.silent
base_folder = os.path.realpath(args.folder)
|
|
|
|
|
|
|
def error_print(msg):
    """Print an error message to stderr.

    Always emitted, regardless of the --verbose/--silent flags, because
    callers use it right before exiting with a failure status.
    """
    # Fix: this was printed to stdout with a misleading "[WARNING]" tag
    # even though every call site treats the condition as fatal.
    print("[ERROR] {}".format(msg), file=sys.stderr)
|
|
|
|
|
|
|
def info_print(msg):
    """Print a progress message to stdout, gated on the global flags.

    NOTE(review): the merged source showed both an `if verbose:` and an
    `if not silent:` guard; they are combined here so --silent always
    wins and output is opt-in via --verbose. Confirm against the
    script's intended default verbosity.
    """
    if verbose and not silent:
        print(msg)
|
|
|
|
|
|
|
info_print("Using folder: {}".format(base_folder))

# Load master-package.json from the base folder. The merged source lost
# the `with open(...)` line; a failed load must be fatal because
# everything below reads `master_package`.
master_package_path = os.path.join(base_folder, "master-package.json")
try:
    with open(master_package_path) as file:
        master_package = json.load(file)
except (OSError, json.JSONDecodeError) as e:
    error_print("Error: {}".format(e))
    sys.exit(1)
|
|
|
# Discover sub-packages: every direct child folder of base_folder that
# contains a sub-package.json, keyed by the package's "name" field.
# (The merged source carried both a list-based and a dict-based version;
# the dict version is kept -- later code indexes by name.)
sub_packages = {}
sub_package_folders = {}
for item in os.listdir(base_folder):
    file_path = os.path.join(base_folder, item, "sub-package.json")
    if os.path.isfile(file_path):
        try:
            with open(file_path) as file:
                sub_package = json.load(file)
        except json.JSONDecodeError as e:
            error_print("Error: {}".format(e))
            continue  # skip the malformed package but keep scanning
        sub_packages[sub_package["name"]] = sub_package
        sub_package_folders[sub_package["name"]] = os.path.join(base_folder, item)
|
|
|
|
|
|
|
|
|
|
# The master "version" is mandatory: it is the version the sub-packages
# are listed (and presumably published) under.
if "version" not in master_package:
    error_print("Master package must contain a \"version\" field")
    sys.exit(1)

version = master_package["version"]

# List every discovered sub-package.
# NOTE(review): the merged source looped five times with undefined
# `name`/`version` locals; reconstructed as one loop over the dict values.
info_print("")
for sub_package in sub_packages.values():
    info_print("    {}@{}".format(sub_package["name"], version))
info_print("")
|
|
|
|
|
|
|
info_print("Creating dependency tree:")

# dependency_list holds one node (a dict of children) per package;
# dependency_tree keeps only the roots: packages with no
# "subDependencies" of their own. A package that depends on X is
# inserted as a child of X's node, so dependencies always sit above
# their dependents. Children are shared dict references, so the "tree"
# is really a DAG over the same nodes.
dependency_list = {}
dependency_tree = {}
for sub_package in sub_packages.values():
    dependency_list[sub_package["name"]] = {}
for sub_package in sub_packages.values():
    if "subDependencies" in sub_package and sub_package["subDependencies"]:
        for dependency in sub_package["subDependencies"]:
            dependency_list[dependency][sub_package["name"]] = dependency_list[sub_package["name"]]
    else:
        dependency_tree[sub_package["name"]] = dependency_list[sub_package["name"]]

# No roots at all means every package claims sub-dependencies, which is
# only possible with a cycle.
if not dependency_tree:
    error_print("Dependency tree is empty. You might have a circular reference.")
    sys.exit(1)
|
|
|
|
|
|
|
def print_dependency_tree(tree, indent):
    """Recursively print package names, indented one space per tree level."""
    for name, children in tree.items():
        info_print(textwrap.indent(name, " " * indent))
        print_dependency_tree(children, indent + 1)


print_dependency_tree(dependency_tree, 1)
info_print("")
|
|
|
|
|
|
|
info_print("Creating publish order:")

publish_order = []
visited = set()


def fill_publish_order(tree):
    """Depth-first pre-order walk: a package is queued before its dependents."""
    for key, sub_tree in tree.items():
        if key not in visited:
            # BUG fix: `visited` was never updated, so a package reachable
            # through several dependencies was queued (and published) more
            # than once.
            visited.add(key)
            publish_order.append(key)
            fill_publish_order(sub_tree)


fill_publish_order(dependency_tree)
for name in publish_order:
    info_print("    {}".format(name))
info_print("")
|
|
|
# List every sub-package with its resolved dependency versions.
# NOTE(review): the merged source lost the inner loop header, and the
# visible code never shows where "dependencies" is populated on a
# sub-package -- .get() guards against the key being absent.
for sub_package in sub_packages.values():
    info_print("    {}:".format(sub_package["name"]))
    for sub_dependency in sub_package.get("dependencies", {}):
        info_print("        {}@{}".format(sub_dependency, sub_package["dependencies"][sub_dependency]))
info_print("")
|
|
|
# Echo each generated package.json in publish order for inspection.
# NOTE(review): no step that *generates* package.json from
# master-package.json + sub-package.json is visible in this (merged)
# source; the files are read here as if already present -- confirm
# against the complete script.
for name in publish_order:
    package_path = os.path.join(sub_package_folders[name], "package.json")
    info_print("    {}:".format(package_path))
    with open(package_path) as file:
        info_print(textwrap.indent(file.read(), "    >"))
info_print("")
|
|
|
|
|
|
|
info_print("Downloading npm config:")

# Fetch the .npmrc contents from bintray's /auth endpoint using HTTP
# basic auth ("<user>@unity:<api-key>").
c = HTTPSConnection("staging-packages.unity.com")
auth = b64encode("{}@unity:{}".format(args.username, args.key).encode("ascii")).decode("ascii")
c.request('GET', '/auth', headers={"Authorization": "Basic %s" % auth})
res = c.getresponse()
# Robustness: a bad username/key returns an error body that would
# otherwise be written into every .npmrc below.
if res.status != 200:
    error_print("Error: npm config download failed with HTTP status {}".format(res.status))
    sys.exit(1)
npm_config = res.read().decode(res.headers.get_content_charset("ascii"))
# Fix: this used a bare print(), leaking the credential-bearing config
# to stdout even with --silent; route it through info_print like the
# rest of the output.
info_print(textwrap.indent(npm_config, "    >"))
|
|
|
# Drop the downloaded npm config into every sub-package folder so `npm
# publish` picks up the credentials.
info_print("Writing config to sub-package folders:")
for folder in sub_package_folders.values():
    npmrc_path = os.path.join(folder, ".npmrc")
    with open(npmrc_path, 'w') as config_file:
        config_file.write(npm_config)
    info_print("    {}".format(npmrc_path))
info_print("")
|
|
|
|
|
|
|
# Only touch the registry when explicitly asked to.
if args.publish:
    for name in publish_order:
        info_print("Publishing {}:".format(name))
        folder = sub_package_folders[name]
        # BUG fix: with shell=True and a *list* argument, POSIX runs only
        # "npm" and silently drops "publish". Run the list without a shell.
        # NOTE(review): on Windows npm resolves via npm.cmd and may need a
        # shell -- confirm this script targets a POSIX environment.
        # Failures are not fatal (no check=True), matching the original's
        # keep-going behavior.
        subprocess.run(["npm", "publish"], cwd=folder)
        info_print("")
|
|
|
|
|
|
|
# Remove the generated package.json and .npmrc files unless --dirty.
if not args.dirty:
    info_print("Removing temporary files:")
    files = []
    for folder in sub_package_folders.values():
        files.append(os.path.join(folder, "package.json"))
        files.append(os.path.join(folder, ".npmrc"))
    for file in files:
        info_print("    {}".format(file))
        try:
            os.remove(file)
        except FileNotFoundError:
            # Best-effort clean-up: the file may never have been created
            # (e.g. the config download or preparation step failed).
            pass
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__": |