diff --git a/benchmark/gitlab-extract.py b/benchmark/gitlab-extract.py
index 8101bb82b6cc90aaffd76cf1fc26374882b787a1..07f4727c22e343e6256a5325011c9aec5571d866 100755
--- a/benchmark/gitlab-extract.py
+++ b/benchmark/gitlab-extract.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-import argparse, pprint, sys
+import argparse, pprint, sys, glob, zipfile
 import requests
 import parse_log
 
@@ -35,6 +35,9 @@ parser.add_argument("-f", "--file",
 parser.add_argument("-c", "--commits",
                     dest="commits",
                     help="The commits to fetch. Default is everything since the most recent entry in the log file.")
+parser.add_argument("-a", "--artifacts",
+                    dest="artifacts",
+                    help="Location of the artifacts (following GitLab's folder structure).  If not given (which should be the common case), the artifacts will be downloaded from GitLab.")
 args = parser.parse_args()
 log_file = sys.stdout if args.file == "-" else open(args.file, "a")
 
@@ -72,13 +75,27 @@ for commit in parse_log.parse_git_commits(args.commits):
             # build failed or cancelled, skip to next
             continue
         # now fetch the build times
-        build_times = requests.get("{}/builds/{}/artifacts/file/build-time.txt".format(project['web_url'], build['id']))
-        if build_times.status_code != 200:
-            # no artifact at this build, try another one
-            continue
-        # Output in the log file format
-        log_file.write("# {}\n".format(commit))
-        log_file.write(build_times.text)
-        log_file.flush()
-        # don't fetch another one
-        break
+        if args.artifacts:
+            artifact_zip = glob.glob('{}/*/{}/{}/artifacts.zip'.format(args.artifacts, project['id'], build['id']))
+            if not artifact_zip:
+                # no artifact at this build, try another one
+                continue
+            assert len(artifact_zip) == 1, "Found too many artifacts"
+            artifact_zip = artifact_zip[0]
+            with zipfile.ZipFile(artifact_zip) as artifact:
+                with artifact.open('build-time.txt') as build_times:
+                    # Output in the log file format
+                    log_file.write("# {}\n".format(commit))
+                    log_file.write(build_times.read().decode('UTF-8'))
+                    log_file.flush()
+        else:
+            build_times = requests.get("{}/builds/{}/artifacts/raw/build-time.txt".format(project['web_url'], build['id']))
+            if build_times.status_code != 200:
+                # no artifact at this build, try another one
+                continue
+            # Output in the log file format
+            log_file.write("# {}\n".format(commit))
+            log_file.write(build_times.text)
+            log_file.flush()
+        # don't fetch another build
+        break
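For reference, here is the local-artifacts lookup from the hunk above pulled out into a standalone sketch. The helper name find_local_build_times and its arguments are hypothetical; the glob pattern assumes GitLab's on-disk layout of <artifacts root>/<date directory>/<project id>/<build id>/artifacts.zip, which is the same layout the patch matches.

    import glob, zipfile

    def find_local_build_times(artifacts_root, project_id, build_id):
        # Locate the single artifacts.zip stored for this build, if any.
        matches = glob.glob('{}/*/{}/{}/artifacts.zip'.format(artifacts_root, project_id, build_id))
        if not matches:
            return None  # no artifact stored for this build
        assert len(matches) == 1, "Found too many artifacts"
        # Read build-time.txt straight out of the zip, without extracting it.
        with zipfile.ZipFile(matches[0]) as artifact:
            with artifact.open('build-time.txt') as build_times:
                return build_times.read().decode('UTF-8')

With the new option, one would add something like -a /path/to/gitlab/shared/artifacts (path hypothetical) to the usual invocation; without -a the script keeps downloading build-time.txt over HTTP as before.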
diff --git a/benchmark/parse_log.py b/benchmark/parse_log.py
index f5474f285b54eeddaae2ac092d63228cff2d67bc..1649fe42e83cdc8a862796f5e4c5dbaa681bb8d5 100644
--- a/benchmark/parse_log.py
+++ b/benchmark/parse_log.py
@@ -45,4 +45,7 @@ def parse_git_commits(commits):
     else:
         # a single commit
         commits = subprocess.check_output(["git", "rev-parse", commits])
-    return reversed(commits.decode("utf-8").strip().split('\n'))
+    output = commits.decode("utf-8").strip()
+    if not output: # no commits at all (e.g. an empty commit range)
+        return []
+    return reversed(output.split('\n'))
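Side note on the parse_log.py change: str.split never returns an empty list, so without the new guard an empty git rev-list result would come back as a single bogus empty commit. A quick sketch, where split_commits is a hypothetical helper mirroring the patched logic:

    # "".split('\n') yields [''], not [] -- hence the explicit empty-output check.
    assert "".strip().split('\n') == ['']

    def split_commits(output):
        output = output.strip()
        if not output: # no commits at all
            return []
        return list(reversed(output.split('\n')))

    assert split_commits("") == []
    assert split_commits("abc\ndef\n") == ['def', 'abc']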