Skip to content
Snippets Groups Projects
Commit 1480bcc0 authored by Alessandro Di Federico's avatar Alessandro Di Federico
Browse files

Reorganize Clone.get_remote_head

parent c16eacc4
No related branches found
No related tags found
No related merge requests found
...@@ -35,66 +35,69 @@ class CloneAction(ActionForComponent): ...@@ -35,66 +35,69 @@ class CloneAction(ActionForComponent):
def _is_satisfied(self):
    """Return True when the component's source directory already exists on disk.

    The clone action is considered satisfied (nothing to do) once
    ``SOURCE_DIR`` from the action's environment is present.
    """
    return os.path.exists(self.environment["SOURCE_DIR"])
def branches(self):
    """Return a mapping of branch name -> commit hash for this component.

    Resolution order:
      1. The local checkout: if the component is built from source and the
         source directory exists, query it directly.
      2. The on-disk cache file ``remote_refs_cache.json`` inside the
         orchestra dot-directory.
      3. Each configured remote, in order; the first remote that yields any
         branches is written back to the cache and returned.

    Returns:
        dict mapping branch name to commit hash, or None when no checkout,
        cache entry, or remote provides any branches.
    """
    # First, check local checkout
    if self.component.from_source:
        source_dir = self.environment["SOURCE_DIR"]
        if os.path.exists(source_dir):
            return self._branches_from_remote(source_dir)

    cache_filepath = os.path.join(self.config.orchestra_dotdir,
                                  "remote_refs_cache.json")

    # Check the cache
    if os.path.exists(cache_filepath):
        with open(cache_filepath, "rb") as f:
            cached_data = json.loads(f.read())
            if self.component.name in cached_data:
                return cached_data[self.component.name]

    # Check all the remotes
    remotes = [f"{base_url}/{self.repository}"
               for base_url
               in self.config.remotes.values()]
    for remote in remotes:
        result = self._branches_from_remote(remote)
        if result:
            # We have a result, cache and return it
            if os.path.exists(cache_filepath):
                with open(cache_filepath, "rb") as f:
                    cached_data = json.loads(f.read())
            else:
                cached_data = {}
            cached_data[self.component.name] = result
            # TODO: prevent race condition, if two clone actions run at the same time
            with open(cache_filepath, "w") as f:
                json.dump(cached_data, f)
            return result

    return None
def branch(self):
    """Pick the highest-priority configured branch available for this component.

    Iterates ``self.config.branches`` in order and returns the first
    ``(branch_name, commit_hash)`` pair present in :meth:`branches`.

    Returns:
        (branch, commit) tuple, or (None, None) when no branches are
        available or none of the configured names match.
    """
    branches = self.branches()
    if branches:
        for branch in self.config.branches:
            if branch in branches:
                return branch, branches[branch]
    return None, None
def _branches_from_remote(self, remote):
    """Query `remote` with ``git ls-remote`` and return {branch: commit}.

    `remote` may be a URL or a local repository path.  Returns an empty
    dict when the command fails or lists no heads (``check_returncode``
    is disabled, so a dead remote is not an error).
    """
    env = dict(self.environment)
    # Multiplex SSH connections so repeated ls-remote calls reuse one session
    env["GIT_SSH_COMMAND"] = "ssh -oControlPath=~/.ssh/ssh-mux-%r@%h:%p -oControlMaster=auto -o ControlPersist=10"
    result = run_script(
        f'git ls-remote -h --refs "{remote}"',
        quiet=True,
        environment=env,
        check_returncode=False
    ).stdout.decode("utf-8")
    # Each output line is "<sha1><whitespace>refs/heads/<branch>"
    parse_regex = re.compile(r"(?P<commit>[a-f0-9]*)\W*refs/heads/(?P<branch>.*)")
    return {branch: commit
            for commit, branch
            in parse_regex.findall(result)}
...@@ -48,7 +48,7 @@ def handle_update(args): ...@@ -48,7 +48,7 @@ def handle_update(args):
if component.clone] if component.clone]
for component in tqdm(clonable_components, unit="components"): for component in tqdm(clonable_components, unit="components"):
logger.info(f"Fetching the latest remote commit for {component.name}") logger.info(f"Fetching the latest remote commit for {component.name}")
_, _ = component.clone.get_remote_head() _, _ = component.clone.branch()
to_pull = [] to_pull = []
for _, component in config.components.items(): for _, component in config.components.items():
......
...@@ -35,7 +35,7 @@ class Build: ...@@ -35,7 +35,7 @@ class Build:
def self_hash(self):
    """Return a SHA1 hex digest identifying this build.

    The digest covers ``self.serialized_build``; when the component has a
    clone action, the selected branch's commit hash is prepended first so
    the hash changes whenever the upstream branch moves.
    """
    serialized_build = self.serialized_build
    if self.component.clone:
        branch, commit = self.component.clone.branch()
        if commit:
            serialized_build = commit.encode("utf-8") + serialized_build
    return hashlib.sha1(serialized_build).hexdigest()
......
...@@ -32,13 +32,13 @@ class Component: ...@@ -32,13 +32,13 @@ class Component:
def commit(self):
    """Return the commit hash selected by the clone action.

    Returns None when the component has no clone action, or when the
    clone action found no matching branch.
    """
    if self.clone is None:
        return None
    branch, commit = self.clone.branch()
    return commit
def branch(self):
    """Return the branch name selected by the clone action.

    Returns None when the component has no clone action, or when the
    clone action found no matching branch.
    """
    if self.clone is None:
        return None
    branch, commit = self.clone.branch()
    return branch
def __str__(self): def __str__(self):
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.