git-backup.py
#!/usr/bin/env python3
# Copyright (C) 2019 Oscar Benedito
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

import os
import requests
import json
import datetime
import git


def get_repositories_data_gitlab(url, page):
    """Fetch one page of project metadata from the GitLab API."""
    response = requests.get(url + '&page=' + str(page))
    return response.json()


def get_repositories_data_github(url, token, page):
    """Fetch one page of repository metadata from the GitHub API, authenticating with a token."""
    headers = {'Authorization': 'token ' + token}
    response = requests.get(url + '?page=' + str(page), headers=headers)
    return response.json()


backup_data = {}
backup_data['time'] = str(datetime.datetime.now())
backup_data['sites'] = {}

with open('tokens.json', 'r') as tokens_file:
    tokens = json.load(tokens_file)


# gitlab.com
if 'gitlab.com' in tokens:
    url = 'https://gitlab.com/api/v4/projects?private_token=' + tokens['gitlab.com'] + '&per_page=100&membership=true'
    page = 1
    repositories = get_repositories_data_gitlab(url, page)

    backup_data['sites']['gitlab.com'] = []
    # Keep requesting pages until the API returns an empty list.
    while len(repositories) != 0:
        for repository in repositories:
            clone_dir = 'repositories/gitlab.com/' + repository['path_with_namespace']
            print('gitlab.com/' + repository['path_with_namespace'])
            if os.path.isdir(clone_dir):
                # Existing mirror: just fetch the latest changes.
                git.cmd.Git(clone_dir).fetch()
            else:
                # New repository: create a bare mirror clone.
                os.system('git clone --mirror ' + repository['ssh_url_to_repo'] + ' ' + clone_dir)
            backup_data['sites']['gitlab.com'].append({
                'name': repository['name'],
                'description': repository['description'],
                'path': repository['path_with_namespace'],
                'ssh_url': repository['ssh_url_to_repo']
            })
        page += 1
        repositories = get_repositories_data_gitlab(url, page)

# github.com
if 'github.com' in tokens:
    url = 'https://api.github.com/user/repos'
    page = 1
    repositories = get_repositories_data_github(url, tokens['github.com'], page)

    backup_data['sites']['github.com'] = []
    while len(repositories) != 0:
        for repository in repositories:
            clone_dir = 'repositories/github.com/' + repository['full_name']
            print('github.com/' + repository['full_name'])
            if os.path.isdir(clone_dir):
                git.cmd.Git(clone_dir).fetch()
            else:
                os.system('git clone --mirror ' + repository['ssh_url'] + ' ' + clone_dir)
            backup_data['sites']['github.com'].append({
                'name': repository['name'],
                'description': repository['description'],
                'path': repository['full_name'],
                'ssh_url': repository['ssh_url']
            })
        page += 1
        repositories = get_repositories_data_github(url, tokens['github.com'], page)

# custom: extra repositories listed in custom_directories.json
if os.path.exists('custom_directories.json'):
    with open('custom_directories.json', 'r') as custom_file:
        repositories = json.load(custom_file)

    for repository in repositories:
        clone_dir = 'repositories/' + repository['host'] + '/' + repository['path']
        print(repository['host'] + '/' + repository['path'])
        if os.path.isdir(clone_dir):
            git.cmd.Git(clone_dir).fetch()
        else:
            os.system('git clone --mirror ' + repository['ssh_url'] + ' ' + clone_dir)
        if repository['host'] not in backup_data['sites']:
            backup_data['sites'][repository['host']] = []
        backup_data['sites'][repository['host']].append({
            'name': repository['name'],
            'description': repository['description'],
            'path': repository['path'],
            'ssh_url': repository['ssh_url']
        })

# Write the collected metadata next to the mirrored repositories.
with open('backup_data.json', 'w', encoding='utf-8') as output_file:
    json.dump(backup_data, output_file, ensure_ascii=False)
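
The script reads two configuration files from its working directory: tokens.json, mapping a host name ('gitlab.com' or 'github.com') to an API token, and an optional custom_directories.json listing extra repositories to mirror. The snippet below is a minimal sketch that writes example versions of both files; the field names follow the keys git-backup.py reads, while the token values, host, paths and SSH URL are placeholders, not values from the original.

#!/usr/bin/env python3
# Example configuration writer -- a sketch, not part of git-backup.py.
# Field names match what git-backup.py reads; all values are placeholders.
import json

tokens = {
    'gitlab.com': 'GITLAB-TOKEN-PLACEHOLDER',   # GitLab personal access token
    'github.com': 'GITHUB-TOKEN-PLACEHOLDER',   # GitHub personal access token
}

custom_directories = [
    {
        'host': 'git.example.org',              # hypothetical self-hosted server
        'path': 'user/notes',                   # becomes repositories/<host>/<path>
        'name': 'notes',
        'description': 'Personal notes repository',
        'ssh_url': 'git@git.example.org:user/notes.git',
    }
]

with open('tokens.json', 'w', encoding='utf-8') as f:
    json.dump(tokens, f, indent=2)

with open('custom_directories.json', 'w', encoding='utf-8') as f:
    json.dump(custom_directories, f, indent=2)

With these files in place, running git-backup.py mirrors every repository into repositories/<host>/<path> and records the metadata it collected in backup_data.json.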