Commit 4a29e9cb authored by Wez Furlong, committed by Facebook GitHub Bot

getdeps: export fbsource commit date into environment

Summary:
This diff extracts both the fbsource commit hash and the date of that
commit, and keeps track of that pair in place of just the commit hash
that we were previously extracting.
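
For context, here is a minimal standalone sketch of the extraction, mirroring the code added in the diff below; the helper name and the way the environment is merged are illustrative and not part of getdeps itself:

    import os
    import subprocess
    from datetime import datetime

    def fbsource_hash_and_date(repo_dir):
        # Ask hg for the working-copy commit hash plus its date in "hgdate" form,
        # with HGPLAIN set so user config cannot alter the output format.
        out = subprocess.check_output(
            ["hg", "log", "-r.", "-T{node}\n{date|hgdate}"],
            cwd=repo_dir,
            env={**os.environ, "HGPLAIN": "1"},
        ).decode("ascii")
        node, datestr = out.split("\n")
        # hgdate prints "<unix seconds> <offset>"; only the seconds are needed here.
        unixtime, _offset = datestr.split(" ")
        date = datetime.fromtimestamp(int(unixtime)).strftime("%Y%m%d.%H%M%S")
        return node, date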

This data is exported into the environment that we pass on to
builders so that it is available if they choose to use it.
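
As a hedged illustration of the consuming side (the FBSOURCE_HASH and FBSOURCE_DATE variable names come from this diff; the fallback values and the version default shown here are hypothetical):

    import os

    # Values exported by getdeps into the builder environment; fall back to
    # placeholders when building outside of fbsource (assumed behavior).
    fbsource_hash = os.environ.get("FBSOURCE_HASH", "unknown")
    fbsource_date = os.environ.get("FBSOURCE_DATE", "00000000.000000")

    # A project could, for example, default its version string to the commit
    # date, e.g. "20200324.113140".
    default_version = fbsource_date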

In a follow-on diff I'll use this to default a version number
in the watchman project.

Reviewed By: fanzeyi

Differential Revision: D20949666

fbshipit-source-id: dc12bffe5f0efc4297b15ba0140c4c67a23ab0fd
parent 8045a2a0
@@ -14,6 +14,7 @@ import sys
 import tempfile
 from .envfuncs import Env, add_path_entry
+from .fetcher import get_fbsource_repo_data
 from .manifest import ContextGenerator
 from .platform import HostType, is_windows
@@ -205,6 +206,9 @@ class BuildOptions(object):
             env["NODE_BIN"] = os.path.join(
                 self.fbsource_dir, "xplat/third-party/node/bin/", node_exe
             )
+            hash_data = get_fbsource_repo_data(self)
+            env["FBSOURCE_HASH"] = hash_data.hash
+            env["FBSOURCE_DATE"] = hash_data.date
         lib_path = None
         if self.is_darwin():
...
+#!/usr/bin/env python3
 # Copyright (c) Facebook, Inc. and its affiliates.
 #
 # This source code is licensed under the MIT license found in the
@@ -16,6 +17,8 @@ import sys
 import tarfile
 import time
 import zipfile
+from datetime import datetime
+from typing import Dict, NamedTuple
 from .copytree import prefetch_dir_if_eden
 from .envfuncs import Env
@@ -424,29 +427,41 @@ class ShipitPathMap(object):
         return change_status


-FBSOURCE_REPO_HASH = {}
+class FbsourceRepoData(NamedTuple):
+    hash: str
+    date: str
+
+
+FBSOURCE_REPO_DATA: Dict[str, FbsourceRepoData] = {}


-def get_fbsource_repo_hash(build_options):
-    """ Returns the hash for the fbsource repo.
+def get_fbsource_repo_data(build_options):
+    """ Returns the commit metadata for the fbsource repo.
     Since we may have multiple first party projects to
     hash, and because we don't mutate the repo, we cache
     this hash in a global. """
-    global FBSOURCE_REPO_HASH
-    cached_hash = FBSOURCE_REPO_HASH.get(build_options.fbsource_dir)
-    if cached_hash:
-        return cached_hash
+    cached_data = FBSOURCE_REPO_DATA.get(build_options.fbsource_dir)
+    if cached_data:
+        return cached_data

-    cmd = ["hg", "log", "-r.", "-T{node}"]
+    cmd = ["hg", "log", "-r.", "-T{node}\n{date|hgdate}"]
     env = Env()
     env.set("HGPLAIN", "1")
-    cached_hash = subprocess.check_output(
+    log_data = subprocess.check_output(
         cmd, cwd=build_options.fbsource_dir, env=dict(env.items())
     ).decode("ascii")
-    FBSOURCE_REPO_HASH[build_options.fbsource_dir] = cached_hash
+    (hash, datestr) = log_data.split("\n")
+    # datestr is like "seconds fractionalseconds"
+    # We want "20200324.113140"
+    (unixtime, _fractional) = datestr.split(" ")
+    date = datetime.fromtimestamp(int(unixtime)).strftime("%Y%m%d.%H%M%S")
+    cached_data = FbsourceRepoData(hash=hash, date=date)
+    FBSOURCE_REPO_DATA[build_options.fbsource_dir] = cached_data

-    return cached_hash
+    return cached_data


 class SimpleShipitTransformerFetcher(Fetcher):
@@ -576,7 +591,7 @@ def download_url_to_file_with_progress(url, file_name):
     start = time.time()
     try:
         (_filename, headers) = urlretrieve(url, file_name, reporthook=progress.progress)
-    except (OSError, IOError) as exc:
+    except (OSError, IOError) as exc:  # noqa: B014
         raise TransientFailure(
             "Failed to download %s to %s: %s" % (url, file_name, str(exc))
         )
...