Mirror of https://github.com/scito/extract_otp_secret_keys.git
Synced 2025-12-07 23:35:07 +01:00
refactor to satisfy flake8
parent 7f8250c0a6
commit 175e7c38e5
5 changed files with 37 additions and 8 deletions
.flake8 (new file, 8 lines)
@@ -0,0 +1,8 @@
+[flake8]
+ignore =
+    E701
+exclude =
+    protobuf_generated_python
+    __pycache__
+per-file-ignores =
+    extract_otp_secret_keys.py: F821, F401
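The ignored codes match this codebase's style. E701 ("multiple statements on one line (colon)") would flag the one-line "if verbose: print(...)" guards used throughout, and the per-file F821/F401 entries silence the undefined-name and unused-import reports the main script would otherwise produce. A standalone sketch of what those two per-file codes react to (illustrative only, not project code):

    import os            # F401: 'os' imported but unused, if never referenced

    def show():
        print(message)   # F821: undefined name 'message'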
(GitHub Actions workflow)

@@ -26,7 +26,7 @@ jobs:
         # stop the build if there are Python syntax errors or undefined names
         flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
         # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=200 --statistics
     - name: Test with pytest
       run: |
         pytest
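The workflow keeps flake8's two-pass setup: the first invocation fails the build on hard errors only (E9 syntax errors plus the F63/F7/F82 misuse and undefined-name checks), while the second reports everything else advisorily via --exit-zero, now with the line-length ceiling raised from 127 to 200 to fit the project's long format strings. The same gate can be reproduced locally; a minimal sketch, assuming flake8 is installed:

    import subprocess

    # Pass 1: hard failures only; check=True aborts here just as CI would
    subprocess.run(['flake8', '.', '--count', '--select=E9,F63,F7,F82',
                    '--show-source', '--statistics'], check=True)

    # Pass 2: advisory; --exit-zero forces a zero return code regardless
    subprocess.run(['flake8', '.', '--count', '--exit-zero', '--max-complexity=10',
                    '--max-line-length=200', '--statistics'], check=True)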
(extract_otp_secret_keys.py)

@@ -49,17 +49,20 @@ import csv
 import json
 from urllib.parse import parse_qs, urlencode, urlparse, quote
 from os import path, mkdir
-from re import sub, compile as rcompile
+from re import compile as rcompile
 import protobuf_generated_python.google_auth_pb2

+
 # https://stackoverflow.com/questions/40226049/find-enums-listed-in-python-descriptor-for-protobuf
 def get_enum_name_by_number(parent, field_name):
     field_value = getattr(parent, field_name)
     return parent.DESCRIPTOR.fields_by_name[field_name].enum_type.values_by_number.get(field_value).name

+
 def convert_secret_from_bytes_to_base32_str(bytes):
     return str(base64.b32encode(bytes), 'utf-8').replace('=', '')

+
 def save_qr(data, name):
     global verbose
     qr = QRCode()
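Two things happen in this hunk: "sub" is dropped from the re import (presumably unused, flake8's F401), and the second blank line that E302 expects is added before each top-level definition. For context, convert_secret_from_bytes_to_base32_str is plain RFC 4648 base32 with the '=' padding stripped, the form authenticator apps accept; a quick standalone check (the input bytes are an arbitrary example):

    import base64

    def convert_secret_from_bytes_to_base32_str(bytes):
        return str(base64.b32encode(bytes), 'utf-8').replace('=', '')

    print(convert_secret_from_bytes_to_base32_str(b'Hello!\xde\xad\xbe\xef'))
    # prints JBSWY3DPEHPK3PXP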
@@ -68,11 +71,13 @@ def save_qr(data, name):
     if verbose: print('Saving to {}'.format(name))
     img.save(name)

+
 def print_qr(data):
     qr = QRCode()
     qr.add_data(data)
     qr.print_ascii()

+
 def parse_args(sys_args):
     arg_parser = argparse.ArgumentParser()
     arg_parser.add_argument('--verbose', '-v', help='verbose output', action='store_true')
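Only E302 blank lines again. For orientation, save_qr and print_qr drive a QRCode object; a self-contained sketch of that API, assuming QRCode comes from the 'qrcode' package (the make_image call and the otpauth URL are illustrative assumptions; only add_data, print_ascii and img.save appear in the diff):

    from qrcode import QRCode

    qr = QRCode()
    qr.add_data('otpauth://totp/Test?secret=JBSWY3DPEHPK3PXP')
    qr.print_ascii()            # ASCII rendering to stdout, as print_qr() does
    img = qr.make_image()       # PIL-style image object
    img.save('example_qr.png')  # as save_qr() does with its name argument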
@@ -88,9 +93,11 @@ def parse_args(sys_args):
         sys.exit(1)
     return args

+
 def sys_main():
     main(sys.argv[1:])

+
 def main(sys_args):
     global verbose, quiet
     args = parse_args(sys_args)
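The sys_main()/main(sys_args) split is the usual console-entry-point pattern: a packaging entry point needs a zero-argument callable, while tests can drive main() with an explicit argument list. A hypothetical stanza showing why the zero-argument wrapper exists (this setup.py excerpt is an assumption, not part of this commit):

    from setuptools import setup

    setup(
        name='extract_otp_secret_keys',
        entry_points={
            'console_scripts': [
                'extract_otp_secret_keys = extract_otp_secret_keys:sys_main',
            ],
        },
    )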
@@ -102,6 +109,7 @@ def main(sys_args):
     write_csv(args, otps)
     write_json(args, otps)

+
 def extract_otps(args):
     global verbose, quiet
     quiet = args.quiet
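Most of the remaining hunks repeat one mechanical change, worth naming once: flake8's E302 ("expected 2 blank lines") wants exactly two blank lines before each top-level definition, and the commit adds the missing second line throughout. In miniature:

    def first():
        pass


    def second():  # exactly two blank lines above, so no E302
        pass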
@@ -115,7 +123,7 @@ def extract_otps(args):
         if not line.startswith('otpauth-migration://'): print('\nWARN: line is not a otpauth-migration:// URL\ninput file: {}\nline "{}"\nProbably a wrong file was given'.format(args.infile, line))
         parsed_url = urlparse(line)
         params = parse_qs(parsed_url.query)
-        if not 'data' in params:
+        if 'data' not in params:
             print('\nERROR: no data query parameter in input URL\ninput file: {}\nline "{}"\nProbably a wrong file was given'.format(args.infile, line))
             sys.exit(1)
         data_encoded = params['data'][0]
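This one is flake8's E713 ("test for membership should be 'not in x'"); both spellings are equivalent, the new form just reads as a single operator. A standalone check against the same URL shape (the data value is a placeholder, not a real export):

    from urllib.parse import parse_qs, urlparse

    parsed_url = urlparse('otpauth-migration://offline?data=PLACEHOLDER')
    params = parse_qs(parsed_url.query)

    assert ('data' not in params) == (not 'data' in params)  # same truth value
    print(params['data'][0])  # prints PLACEHOLDER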
@@ -159,6 +167,7 @@ def extract_otps(args):
         })
     return otps

+
 def write_csv(args, otps):
     global verbose, quiet
     if args.csv and len(otps) > 0:
@@ -168,6 +177,7 @@ def write_csv(args, otps):
         writer.writerows(otps)
         if not quiet: print("Exported {} otps to csv".format(len(otps)))

+
 def write_json(args, otps):
     global verbose, quiet
     if args.json:
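writer.writerows(otps) over a list of per-OTP dicts points to csv.DictWriter; a minimal standalone version of the export step (the field names here are assumptions for illustration):

    import csv

    otps = [{'name': 'Test account', 'secret': 'JBSWY3DPEHPK3PXP'}]
    with open('test_example_output.csv', 'w', newline='') as outfile:
        writer = csv.DictWriter(outfile, otps[0].keys())
        writer.writeheader()
        writer.writerows(otps)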
@@ -175,5 +185,6 @@ def write_json(args, otps):
         json.dump(otps, outfile, indent=4)
         if not quiet: print("Exported {} otp entries to json".format(len(otps)))

+
 if __name__ == '__main__':
     sys_main()
(pytest test module)

@@ -24,6 +24,7 @@ import os

 import extract_otp_secret_keys

+
 def test_extract_csv():
     # Arrange
     cleanup()
@@ -35,11 +36,12 @@ def test_extract_csv():
     expected_csv = read_csv('example_output.csv')
     actual_csv = read_csv('test_example_output.csv')

-    assert actual_csv == actual_csv
+    assert actual_csv == expected_csv

     # Clean up
     cleanup()

+
 def test_extract_json():
     # Arrange
     cleanup()
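This hunk fixes a genuine bug rather than a style nit: the old assertion compared the freshly read CSV to itself, so the test passed no matter what the tool wrote. In miniature:

    actual_csv = ['name,secret', 'Test,JBSWY3DPEHPK3PXP']
    assert actual_csv == actual_csv    # tautology, can never fail

The corrected line compares the actual output against the checked-in expectation in example_output.csv.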
@@ -55,13 +57,16 @@ def test_extract_json():
     # Clean up
     cleanup()

+
 def cleanup():
     remove_file('test_example_output.csv')
     remove_file('test_example_output.json')

+
 def remove_file(filename):
     if os.path.exists(filename): os.remove(filename)

+
 def read_csv(filename):
     """Returns a list of lines."""
     with open(filename, "r") as infile:
@@ -71,6 +76,7 @@ def read_csv(filename):
         lines.append(line)
     return lines

+
 def read_json(filename):
     """Returns a list or a dictionary."""
     with open(filename, "r") as infile:
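The read_json helper is cut off by the hunk boundary; given its docstring, it presumably wraps json.load. A standalone sketch of the helper (an assumption matching the visible docstring, not the file's exact body):

    import json

    def read_json(filename):
        """Returns a list or a dictionary."""
        with open(filename, "r") as infile:
            return json.load(infile)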
(unittest test module)

@@ -25,6 +25,7 @@ import os

 import extract_otp_secret_keys

+
 class TestExtract(unittest.TestCase):

     def test_extract_csv(self):
@@ -53,9 +54,11 @@ class TestExtract(unittest.TestCase):
         remove_file('test_example_output.csv')
         remove_file('test_example_output.json')

+
 def remove_file(filename):
     if os.path.exists(filename): os.remove(filename)

+
 def read_csv(filename):
     """Returns a list of lines."""
     with open(filename, "r") as infile:
@@ -65,6 +68,7 @@ def read_csv(filename):
         lines.append(line)
     return lines

+
 def read_json(filename):
     """Returns a list or a dictionary."""
     with open(filename, "r") as infile: