update-python-libraries: format with black/isort

parent 180ccef717
commit 50311a8d07
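The diff below is purely mechanical: import ordering from isort, quote and line-wrapping style from black. As a minimal sketch, such a pass can typically be reproduced with the two tools' CLIs (`isort <file> && black <file>`) or, equivalently, via their Python APIs; the file path and the API route shown here are illustrative assumptions, not how this commit was necessarily produced:

# Sketch only: reproduce an isort + black pass over the script.
# The path assumes a nixpkgs checkout as the working directory.
import pathlib

import black
import isort

path = pathlib.Path(
    "pkgs/development/interpreters/python/update-python-libraries/"
    "update-python-libraries.py"
)
source = path.read_text()
source = isort.code(source)  # sort imports into stdlib / third-party groups
source = black.format_str(source, mode=black.Mode())  # quotes, wrapping, spacing
path.write_text(source)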
@@ -12,23 +12,24 @@ to update all non-pinned libraries in that folder.
 """

 import argparse
+import collections
 import json
 import logging
 import os
 import re
-import requests
-from concurrent.futures import ThreadPoolExecutor as Pool
-from packaging.version import Version as _Version
-from packaging.version import InvalidVersion
-from packaging.specifiers import SpecifierSet
-from typing import Optional, Any
-import collections
 import subprocess
+from concurrent.futures import ThreadPoolExecutor as Pool
+from typing import Any, Optional
+
+import requests
+from packaging.specifiers import SpecifierSet
+from packaging.version import InvalidVersion
+from packaging.version import Version as _Version

 INDEX = "https://pypi.io/pypi"
 """url of PyPI"""

-EXTENSIONS = ['tar.gz', 'tar.bz2', 'tar', 'zip', '.whl']
+EXTENSIONS = ["tar.gz", "tar.bz2", "tar", "zip", ".whl"]
 """Permitted file extensions. These are evaluated from left to right and the first occurance is returned."""

 PRERELEASES = False
@@ -37,13 +38,16 @@ BULK_UPDATE = False

 GIT = "git"

-NIXPKGS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip()
+NIXPKGS_ROOT = (
+    subprocess.check_output(["git", "rev-parse", "--show-toplevel"])
+    .decode("utf-8")
+    .strip()
+)

 logging.basicConfig(level=logging.INFO)


 class Version(_Version, collections.abc.Sequence):
-
     def __init__(self, version):
         super().__init__(version)
         # We cannot use `str(Version(0.04.21))` because that becomes `0.4.21`
@@ -65,7 +69,7 @@ def _get_values(attribute, text):

     :returns: List of matches.
     """
-    regex = fr'{re.escape(attribute)}\s+=\s+"(.*)";'
+    regex = rf'{re.escape(attribute)}\s+=\s+"(.*)";'
     regex = re.compile(regex)
     values = regex.findall(text)
     return values
@@ -73,14 +77,18 @@ def _get_values(attribute, text):

 def _get_attr_value(attr_path: str) -> Optional[Any]:
     try:
-        response = subprocess.check_output([
-            "nix",
-            "--extra-experimental-features", "nix-command",
-            "eval",
-            "-f", f"{NIXPKGS_ROOT}/default.nix",
-            "--json",
-            f"{attr_path}"
-        ])
+        response = subprocess.check_output(
+            [
+                "nix",
+                "--extra-experimental-features",
+                "nix-command",
+                "eval",
+                "-f",
+                f"{NIXPKGS_ROOT}/default.nix",
+                "--json",
+                f"{attr_path}",
+            ]
+        )
         return json.loads(response.decode())
     except (subprocess.CalledProcessError, ValueError):
         return None
@@ -100,12 +108,13 @@ def _get_unique_value(attribute, text):
     else:
         raise ValueError("no value found for {}".format(attribute))


 def _get_line_and_value(attribute, text, value=None):
     """Match attribute in text. Return the line and the value of the attribute."""
     if value is None:
-        regex = rf'({re.escape(attribute)}\s+=\s+\"(.*)\";)'
+        regex = rf"({re.escape(attribute)}\s+=\s+\"(.*)\";)"
     else:
-        regex = rf'({re.escape(attribute)}\s+=\s+\"({re.escape(value)})\";)'
+        regex = rf"({re.escape(attribute)}\s+=\s+\"({re.escape(value)})\";)"
     regex = re.compile(regex)
     results = regex.findall(text)
     n = len(results)
@@ -138,7 +147,7 @@ def _fetch_page(url):

 def _fetch_github(url):
     headers = {}
-    token = os.environ.get('GITHUB_API_TOKEN')
+    token = os.environ.get("GITHUB_API_TOKEN")
     if token:
         headers["Authorization"] = f"token {token}"
     r = requests.get(url, headers=headers)
@@ -151,31 +160,26 @@ def _fetch_github(url):

 def _hash_to_sri(algorithm, value):
     """Convert a hash to its SRI representation"""
-    return subprocess.check_output([
-        "nix",
-        "hash",
-        "to-sri",
-        "--type", algorithm,
-        value
-    ]).decode().strip()
+    return (
+        subprocess.check_output(["nix", "hash", "to-sri", "--type", algorithm, value])
+        .decode()
+        .strip()
+    )


 def _skip_bulk_update(attr_name: str) -> bool:
-    return bool(_get_attr_value(
-        f"{attr_name}.skipBulkUpdate"
-    ))
+    return bool(_get_attr_value(f"{attr_name}.skipBulkUpdate"))


 SEMVER = {
-    'major' : 0,
-    'minor' : 1,
-    'patch' : 2,
+    "major": 0,
+    "minor": 1,
+    "patch": 2,
 }


 def _determine_latest_version(current_version, target, versions):
-    """Determine latest version, given `target`.
-    """
+    """Determine latest version, given `target`."""
     current_version = Version(current_version)

     def _parse_versions(versions):
@@ -193,7 +197,7 @@ def _determine_latest_version(current_version, target, versions):
         if len(ceiling) == 0:
             ceiling = None
         else:
-            ceiling[-1]+=1
+            ceiling[-1] += 1
             ceiling = Version(".".join(map(str, ceiling)))

     # We do not want prereleases
@@ -210,17 +214,19 @@ def _get_latest_version_pypi(package, extension, current_version, target):
     url = "{}/{}/json".format(INDEX, package)
     json = _fetch_page(url)

-    versions = json['releases'].keys()
+    versions = json["releases"].keys()
     version = _determine_latest_version(current_version, target, versions)

     try:
-        releases = json['releases'][version]
+        releases = json["releases"][version]
     except KeyError as e:
-        raise KeyError('Could not find version {} for {}'.format(version, package)) from e
+        raise KeyError(
+            "Could not find version {} for {}".format(version, package)
+        ) from e
     for release in releases:
-        if release['filename'].endswith(extension):
+        if release["filename"].endswith(extension):
             # TODO: In case of wheel we need to do further checks!
-            sha256 = release['digests']['sha256']
+            sha256 = release["digests"]["sha256"]
             break
     else:
         sha256 = None
@@ -240,33 +246,40 @@ def _get_latest_version_github(package, extension, current_version, target):
     attr_path = os.environ.get("UPDATE_NIX_ATTR_PATH", f"python3Packages.{package}")
     try:
         homepage = subprocess.check_output(
-            ["nix", "eval", "-f", f"{NIXPKGS_ROOT}/default.nix", "--raw", f"{attr_path}.src.meta.homepage"])\
-            .decode('utf-8')
+            [
+                "nix",
+                "eval",
+                "-f",
+                f"{NIXPKGS_ROOT}/default.nix",
+                "--raw",
+                f"{attr_path}.src.meta.homepage",
+            ]
+        ).decode("utf-8")
     except Exception as e:
         raise ValueError(f"Unable to determine homepage: {e}")
-    owner_repo = homepage[len("https://github.com/"):]  # remove prefix
+    owner_repo = homepage[len("https://github.com/") :]  # remove prefix
     owner, repo = owner_repo.split("/")

     url = f"https://api.github.com/repos/{owner}/{repo}/releases"
     all_releases = _fetch_github(url)
-    releases = list(filter(lambda x: not x['prerelease'], all_releases))
+    releases = list(filter(lambda x: not x["prerelease"], all_releases))

     if len(releases) == 0:
         raise ValueError(f"{homepage} does not contain any stable releases")

-    versions = map(lambda x: strip_prefix(x['tag_name']), releases)
+    versions = map(lambda x: strip_prefix(x["tag_name"]), releases)
     version = _determine_latest_version(current_version, target, versions)

-    release = next(filter(lambda x: strip_prefix(x['tag_name']) == version, releases))
-    prefix = get_prefix(release['tag_name'])
+    release = next(filter(lambda x: strip_prefix(x["tag_name"]) == version, releases))
+    prefix = get_prefix(release["tag_name"])

     # some attributes require using the fetchgit
     git_fetcher_args = []
-    if (_get_attr_value(f"{attr_path}.src.fetchSubmodules")):
+    if _get_attr_value(f"{attr_path}.src.fetchSubmodules"):
         git_fetcher_args.append("--fetch-submodules")
-    if (_get_attr_value(f"{attr_path}.src.fetchLFS")):
+    if _get_attr_value(f"{attr_path}.src.fetchLFS"):
         git_fetcher_args.append("--fetch-lfs")
-    if (_get_attr_value(f"{attr_path}.src.leaveDotGit")):
+    if _get_attr_value(f"{attr_path}.src.leaveDotGit"):
         git_fetcher_args.append("--leave-dotGit")

     if git_fetcher_args:
@@ -274,8 +287,10 @@ def _get_latest_version_github(package, extension, current_version, target):
         cmd = [
             "nix-prefetch-git",
             f"https://github.com/{owner}/{repo}.git",
-            "--hash", algorithm,
-            "--rev", f"refs/tags/{release['tag_name']}"
+            "--hash",
+            algorithm,
+            "--rev",
+            f"refs/tags/{release['tag_name']}",
         ]
         cmd.extend(git_fetcher_args)
         response = subprocess.check_output(cmd)
@@ -283,45 +298,60 @@ def _get_latest_version_github(package, extension, current_version, target):
         hash = _hash_to_sri(algorithm, document[algorithm])
     else:
         try:
-            hash = subprocess.check_output([
-                "nix-prefetch-url",
-                "--type", "sha256",
-                "--unpack",
-                f"{release['tarball_url']}"
-            ], stderr=subprocess.DEVNULL).decode('utf-8').strip()
+            hash = (
+                subprocess.check_output(
+                    [
+                        "nix-prefetch-url",
+                        "--type",
+                        "sha256",
+                        "--unpack",
+                        f"{release['tarball_url']}",
+                    ],
+                    stderr=subprocess.DEVNULL,
+                )
+                .decode("utf-8")
+                .strip()
+            )
         except (subprocess.CalledProcessError, UnicodeError):
             # this may fail if they have both a branch and a tag of the same name, attempt tag name
-            tag_url = str(release['tarball_url']).replace("tarball","tarball/refs/tags")
-            hash = subprocess.check_output([
-                "nix-prefetch-url",
-                "--type", "sha256",
-                "--unpack",
-                tag_url
-            ], stderr=subprocess.DEVNULL).decode('utf-8').strip()
+            tag_url = str(release["tarball_url"]).replace(
+                "tarball", "tarball/refs/tags"
+            )
+            hash = (
+                subprocess.check_output(
+                    ["nix-prefetch-url", "--type", "sha256", "--unpack", tag_url],
+                    stderr=subprocess.DEVNULL,
+                )
+                .decode("utf-8")
+                .strip()
+            )

     return version, hash, prefix


 FETCHERS = {
-    'fetchFromGitHub' : _get_latest_version_github,
-    'fetchPypi' : _get_latest_version_pypi,
-    'fetchurl' : _get_latest_version_pypi,
+    "fetchFromGitHub": _get_latest_version_github,
+    "fetchPypi": _get_latest_version_pypi,
+    "fetchurl": _get_latest_version_pypi,
 }


-DEFAULT_SETUPTOOLS_EXTENSION = 'tar.gz'
+DEFAULT_SETUPTOOLS_EXTENSION = "tar.gz"


 FORMATS = {
-    'setuptools' : DEFAULT_SETUPTOOLS_EXTENSION,
-    'wheel' : 'whl',
-    'pyproject' : 'tar.gz',
-    'flit' : 'tar.gz'
+    "setuptools": DEFAULT_SETUPTOOLS_EXTENSION,
+    "wheel": "whl",
+    "pyproject": "tar.gz",
+    "flit": "tar.gz",
 }


 def _determine_fetcher(text):
     # Count occurrences of fetchers.
-    nfetchers = sum(text.count('src = {}'.format(fetcher)) for fetcher in FETCHERS.keys())
+    nfetchers = sum(
+        text.count("src = {}".format(fetcher)) for fetcher in FETCHERS.keys()
+    )
     if nfetchers == 0:
         raise ValueError("no fetcher.")
     elif nfetchers > 1:
@@ -329,7 +359,7 @@ def _determine_fetcher(text):
     else:
         # Then we check which fetcher to use.
        for fetcher in FETCHERS.keys():
-            if 'src = {}'.format(fetcher) in text:
+            if "src = {}".format(fetcher) in text:
                 return fetcher

@@ -341,47 +371,46 @@ def _determine_extension(text, fetcher):
     - fetchurl, we determine the extension from the url.
     - fetchFromGitHub we simply use `.tar.gz`.
     """
-    if fetcher == 'fetchPypi':
+    if fetcher == "fetchPypi":
         try:
-            src_format = _get_unique_value('format', text)
+            src_format = _get_unique_value("format", text)
         except ValueError:
-            src_format = None # format was not given
+            src_format = None  # format was not given

         try:
-            extension = _get_unique_value('extension', text)
+            extension = _get_unique_value("extension", text)
         except ValueError:
-            extension = None # extension was not given
+            extension = None  # extension was not given

         if extension is None:
             if src_format is None:
-                src_format = 'setuptools'
-            elif src_format == 'other':
+                src_format = "setuptools"
+            elif src_format == "other":
                 raise ValueError("Don't know how to update a format='other' package.")
             extension = FORMATS[src_format]

-    elif fetcher == 'fetchurl':
-        url = _get_unique_value('url', text)
+    elif fetcher == "fetchurl":
+        url = _get_unique_value("url", text)
         extension = os.path.splitext(url)[1]
-        if 'pypi' not in url:
-            raise ValueError('url does not point to PyPI.')
+        if "pypi" not in url:
+            raise ValueError("url does not point to PyPI.")

-    elif fetcher == 'fetchFromGitHub':
+    elif fetcher == "fetchFromGitHub":
         extension = "tar.gz"

     return extension


 def _update_package(path, target):

     # Read the expression
-    with open(path, 'r') as f:
+    with open(path, "r") as f:
         text = f.read()

     # Determine pname. Many files have more than one pname
-    pnames = _get_values('pname', text)
+    pnames = _get_values("pname", text)

     # Determine version.
-    version = _get_unique_value('version', text)
+    version = _get_unique_value("version", text)

     # First we check how many fetchers are mentioned.
     fetcher = _determine_fetcher(text)
@@ -396,7 +425,9 @@ def _update_package(path, target):
         elif _get_attr_value(f"python3Packages.{pname}.cargoDeps") is not None:
             raise ValueError(f"Cargo dependencies are unsupported, skipping {pname}")
         try:
-            new_version, new_sha256, prefix = FETCHERS[fetcher](pname, extension, version, target)
+            new_version, new_sha256, prefix = FETCHERS[fetcher](
+                pname, extension, version, target
+            )
             successful_fetch = True
             break
         except ValueError:
@@ -413,7 +444,7 @@ def _update_package(path, target):
     if not new_sha256:
         raise ValueError("no file available for {}.".format(pname))

-    text = _replace_value('version', new_version, text)
+    text = _replace_value("version", new_version, text)

     # hashes from pypi are 16-bit encoded sha256's, normalize it to sri to avoid merge conflicts
     # sri hashes have been the default format since nix 2.4+
@@ -423,16 +454,16 @@ def _update_package(path, target):
     if old_hash := _get_attr_value(f"python3Packages.{pname}.src.outputHash"):
         # fetchers can specify a sha256, or a sri hash
         try:
-            text = _replace_value('hash', sri_hash, text, old_hash)
+            text = _replace_value("hash", sri_hash, text, old_hash)
         except ValueError:
-            text = _replace_value('sha256', sri_hash, text, old_hash)
+            text = _replace_value("sha256", sri_hash, text, old_hash)
     else:
         raise ValueError(f"Unable to retrieve old hash for {pname}")

-    if fetcher == 'fetchFromGitHub':
+    if fetcher == "fetchFromGitHub":
         # in the case of fetchFromGitHub, it's common to see `rev = version;` or `rev = "v${version}";`
         # in which no string value is meant to be substituted. However, we can just overwrite the previous value.
-        regex = r'(rev\s+=\s+[^;]*;)'
+        regex = r"(rev\s+=\s+[^;]*;)"
         regex = re.compile(regex)
         matches = regex.findall(text)
         n = len(matches)
@@ -444,30 +475,33 @@ def _update_package(path, target):
         match = matches[0]
         text = text.replace(match, f'rev = "refs/tags/{prefix}${{version}}";')
         # incase there's no prefix, just rewrite without interpolation
-        text = text.replace('"${version}";', 'version;')
+        text = text.replace('"${version}";', "version;")

-    with open(path, 'w') as f:
+    with open(path, "w") as f:
         f.write(text)

-    logging.info("Path {}: updated {} from {} to {}".format(path, pname, version, new_version))
+    logging.info(
+        "Path {}: updated {} from {} to {}".format(
+            path, pname, version, new_version
+        )
+    )

     result = {
-       'path' : path,
-       'target': target,
-       'pname': pname,
-       'old_version' : version,
-       'new_version' : new_version,
+        "path": path,
+        "target": target,
+        "pname": pname,
+        "old_version": version,
+        "new_version": new_version,
         #'fetcher' : fetcher,
-        }
+    }

     return result


 def _update(path, target):

     # We need to read and modify a Nix expression.
     if os.path.isdir(path):
-        path = os.path.join(path, 'default.nix')
+        path = os.path.join(path, "default.nix")

     # If a default.nix does not exist, we quit.
     if not os.path.isfile(path):
@@ -487,35 +521,41 @@ def _update(path, target):


 def _commit(path, pname, old_version, new_version, pkgs_prefix="python: ", **kwargs):
-    """Commit result.
-    """
+    """Commit result."""

-    msg = f'{pkgs_prefix}{pname}: {old_version} -> {new_version}'
+    msg = f"{pkgs_prefix}{pname}: {old_version} -> {new_version}"

     if changelog := _get_attr_value(f"{pkgs_prefix}{pname}.meta.changelog"):
         msg += f"\n\n{changelog}"

     try:
-        subprocess.check_call([GIT, 'add', path])
-        subprocess.check_call([GIT, 'commit', '-m', msg])
+        subprocess.check_call([GIT, "add", path])
+        subprocess.check_call([GIT, "commit", "-m", msg])
     except subprocess.CalledProcessError as e:
-        subprocess.check_call([GIT, 'checkout', path])
-        raise subprocess.CalledProcessError(f'Could not commit {path}') from e
+        subprocess.check_call([GIT, "checkout", path])
+        raise subprocess.CalledProcessError(f"Could not commit {path}") from e

     return True


 def main():

     epilog = """
 environment variables:
   GITHUB_API_TOKEN\tGitHub API token used when updating github packages
     """
-    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, epilog=epilog)
-    parser.add_argument('package', type=str, nargs='+')
-    parser.add_argument('--target', type=str, choices=SEMVER.keys(), default='major')
-    parser.add_argument('--commit', action='store_true', help='Create a commit for each package update')
-    parser.add_argument('--use-pkgs-prefix', action='store_true', help='Use python3Packages.${pname}: instead of python: ${pname}: when making commits')
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.RawDescriptionHelpFormatter, epilog=epilog
+    )
+    parser.add_argument("package", type=str, nargs="+")
+    parser.add_argument("--target", type=str, choices=SEMVER.keys(), default="major")
+    parser.add_argument(
+        "--commit", action="store_true", help="Create a commit for each package update"
+    )
+    parser.add_argument(
+        "--use-pkgs-prefix",
+        action="store_true",
+        help="Use python3Packages.${pname}: instead of python: ${pname}: when making commits",
+    )

     args = parser.parse_args()
     target = args.target
@@ -550,6 +590,5 @@ environment variables:
     logging.info("{} package(s) updated".format(count))


-
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()