Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merge upstream changes #322

Open
wants to merge 18 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -28,3 +28,4 @@ test.py
backups/*
!backups/.gitkeep
wizard.pyc
__pycache__/
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ There are shell and bash scripts out there, which were created in order to downl
but most of them are not maintained and throwing errors. So, this project is aiming for full backup automation, and therefore this is the feature road map:

:white_check_mark: Create a script in python
:white_check_mark: Support creating config.json from user input ('wizard')
:white_check_mark: Support creating config.yaml from user input ('wizard')
:white_check_mark: Download backup file locally
:white_check_mark: Add an option to stream backup file to S3
:white_check_mark: Check how to manually create a cron task on OS X / Linux
Expand All @@ -16,7 +16,7 @@ but most of them are not maintained and throwing errors. So, this project is aim

# Installation
## Prerequisite:
:heavy_plus_sign: python 2.7.x or python 3.x.x
:heavy_plus_sign: python 3
:heavy_plus_sign: [virtualenv](https://pypi.org/project/virtualenv/) installed globally (pip install virtualenv)

## Instructions:
Expand Down
Empty file removed __init__.py
Empty file.
57 changes: 37 additions & 20 deletions backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,9 @@
import os
import argparse
import requests
import boto
from boto.s3.key import Key
import boto3
from boto3.s3.transfer import TransferConfig
import wizard
from time import gmtime, strftime


def read_config():
Expand Down Expand Up @@ -38,8 +37,8 @@ def create_confluence_backup(self):
time.sleep(self.wait)
while 'fileName' not in self.backup_status.keys():
self.backup_status = json.loads(self.session.get(confluence_backup_status).text)
print('Current status: {progress}; {description}'.format(
progress=self.backup_status['alternativePercentage'],
print('Current status: {progress}; {description}'.format(
progress=self.backup_status['alternativePercentage'],
description=self.backup_status['currentStatus']))
time.sleep(self.wait)
return 'https://{url}/wiki/download/{file_name}'.format(
Expand All @@ -58,8 +57,8 @@ def create_jira_backup(self):
while 'result' not in self.backup_status.keys():
self.backup_status = json.loads(self.session.get(jira_backup_status).text)
print('Current status: {status} {progress}; {description}'.format(
status=self.backup_status['status'],
progress=self.backup_status['progress'],
status=self.backup_status['status'],
progress=self.backup_status['progress'],
description=self.backup_status['description']))
time.sleep(self.wait)
return '{prefix}/{result_id}'.format(
Expand All @@ -79,22 +78,40 @@ def stream_to_s3(self, url, remote_filename):
print('-> Streaming to S3')

if self.config['UPLOAD_TO_S3']['AWS_ACCESS_KEY'] == '':
connect = boto.connect_s3()
s3_client = boto3.client('s3')
else:
connect = boto.connect_s3(
aws_access_key_id=self.config['UPLOAD_TO_S3']['AWS_ACCESS_KEY'],
aws_secret_access_key=self.config['UPLOAD_TO_S3']['AWS_SECRET_KEY']
)

bucket = connect.get_bucket(self.config['UPLOAD_TO_S3']['S3_BUCKET'])
s3_client = boto3.client(
's3',
aws_access_key_id=self.config['UPLOAD_TO_S3']['AWS_ACCESS_KEY'],
aws_secret_access_key=self.config['UPLOAD_TO_S3']['AWS_SECRET_KEY'],
region_name=self.config['UPLOAD_TO_S3']['AWS_REGION'],
endpoint_url=self.config['UPLOAD_TO_S3']['AWS_ENDPOINT_URL'],
use_ssl=self.config['UPLOAD_TO_S3']['AWS_IS_SECURE']
)

bucket_name = self.config['UPLOAD_TO_S3']['S3_BUCKET']
r = self.session.get(url, stream=True)
if r.status_code == 200:
k = Key(bucket)
k.key = remote_filename
k.content_type = r.headers['content-type']
k.set_contents_from_string(r.content)
return

key = "{s3_bucket}{s3_filename}".format(
s3_bucket=self.config['UPLOAD_TO_S3']['S3_DIR'],
s3_filename=remote_filename
)

content_length = int(r.headers.get('Content-Length', 0))

config = TransferConfig(
multipart_threshold=content_length + 1,
max_concurrency=1,
use_threads=False
)

s3_client.upload_fileobj(
Fileobj=r.raw,
Bucket=bucket_name,
Key=key,
ExtraArgs={'ContentType': r.headers['content-type']},
Config=config
)

if __name__ == '__main__':
parser = argparse.ArgumentParser()
Expand Down
10 changes: 7 additions & 3 deletions config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,11 @@ USER_EMAIL: "email address for the Atlassian account you're using to create the
API_TOKEN: "token ID generated at https://id.atlassian.com/manage/api-tokens"
INCLUDE_ATTACHMENTS: "include attachments? this will make the backup size bigger - true / false"
DOWNLOAD_LOCALLY: "download the backup file to backups folder? true / false"
UPLOAD_TO_S3:
S3_BUCKET: "S3 bucket name (empty value will skip this step)"
UPLOAD_TO_S3:
AWS_ENDPOINT_URL: "amazon S3 endpoints https://docs.aws.amazon.com/general/latest/gr/s3.html"
AWS_REGION: "amazon S3 region"
S3_BUCKET: "S3 bucket name (empty value will skip this step)"
S3_DIR: "S3 directory for upload (example Atlassian/)"
AWS_ACCESS_KEY: "not mandatory if already set on the machine with AWS CLI"
AWS_SECRET_KEY: "not mandatory if already set on the machine with AWS CLI"
AWS_SECRET_KEY: "not mandatory if already set on the machine with AWS CLI"
AWS_IS_SECURE: True
10 changes: 3 additions & 7 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,3 @@
boto==2.48.0
certifi==2018.4.16
chardet==3.0.4
idna==2.6
requests==2.20.0
urllib3==1.24.2
PyYAML==5.3.1
boto3==1.35.26
PyYAML==6.0.2
Requests==2.32.3
46 changes: 30 additions & 16 deletions wizard.py
Original file line number Diff line number Diff line change
@@ -1,31 +1,45 @@
import os
import json
import yaml


def create_config():
jira_host = raw_input("What is your Jira host name? ")
user = raw_input("What is your Jira account email address? ")
password = raw_input("Paste your Jira API token: ")
attachments = raw_input("Do you want to include attachments? (true / false) ")
download_locally = raw_input("Do you want to download the backup file locally? (true / false) ")
jira_host = input("What is your Jira host name? ")
user = input("What is your Jira account email address? ")
password = input("Paste your Jira API token: ")
attachments = input("Do you want to include attachments? (true / false) ")
download_locally = input("Do you want to download the backup file locally? (true / false) ")

custom_config = {
'JIRA_HOST': jira_host,
'INCLUDE_ATTACHMENTS': attachments.lower(),
'JIRA_EMAIL': user,
'HOST_URL': jira_host,
'USER_EMAIL': user,
'API_TOKEN': password,
'INCLUDE_ATTACHMENTS': attachments.lower(),
'DOWNLOAD_LOCALLY': download_locally.lower(),
'UPLOAD_TO_S3': {
'AWS_ENDPOINT_URL': "",
'AWS_REGION': "",
'S3_BUCKET': "",
'S3_DIR': "",
'AWS_ACCESS_KEY': "",
'AWS_SECRET_KEY': ""
'AWS_SECRET_KEY': "",
'AWS_IS_SECURE': True
}
}
upload_backup = raw_input("Do you want to upload the backup file to S3? (true / false) ")

upload_backup = input("Do you want to upload the backup file to S3? (true / false) ")
if upload_backup.lower() == 'true':
custom_config['UPLOAD_TO_S3']['S3_BUCKET'] = raw_input("What is the S3 bucket name? ")
custom_config['UPLOAD_TO_S3']['AWS_ACCESS_KEY'] = raw_input("What is your AWS access key? ")
custom_config['UPLOAD_TO_S3']['AWS_SECRET_KEY'] = raw_input("What is your AWS secret key? ")
custom_config['UPLOAD_TO_S3']['AWS_ENDPOINT_URL'] = input("What is your AWS endpoint url? ")
custom_config['UPLOAD_TO_S3']['AWS_REGION'] = input("What is your AWS region? ")
custom_config['UPLOAD_TO_S3']['S3_BUCKET'] = input("What is the S3 bucket name? ")
custom_config['UPLOAD_TO_S3']['S3_DIR'] = input("What is the S3 directory for upload? (example Atlassian/) ")
custom_config['UPLOAD_TO_S3']['AWS_ACCESS_KEY'] = input("What is your AWS access key? ")
custom_config['UPLOAD_TO_S3']['AWS_SECRET_KEY'] = input("What is your AWS secret key? ")
custom_config['UPLOAD_TO_S3']['AWS_IS_SECURE'] = input("Do you want to use SSL? (true / false) ")

config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.json')
config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
with open(config_path, 'w+') as config_file:
json.dump(custom_config, config_file)
yaml.dump(custom_config, config_file, default_flow_style=False)


if __name__ == "__main__":
create_config()