Add Justfile for task execution
Remove all config files present in roles/config/files
This commit is contained in:
parent
bccb6e2953
commit
dc9b62a8f2
17
JUSTFILE
Normal file
17
JUSTFILE
Normal file
@ -0,0 +1,17 @@
|
||||
# Presets for setup and config installation
|
||||
|
||||
# List all available recipes
|
||||
help:
|
||||
@just --list --justfile {{justfile()}}
|
||||
|
||||
# first time setup
|
||||
setup:
|
||||
ansible-playbook setup.yml --tags all
|
||||
|
||||
# copy configs and services
|
||||
config:
|
||||
ansible-playbook setup.yml --tags "config,services"
|
||||
|
||||
# install packages
|
||||
packages:
|
||||
ansible-playbook setup.yml --tags "packages"
|
12
README.md
12
README.md
@ -10,25 +10,23 @@ First time installation:
|
||||
|
||||
```sh
|
||||
pacman -Syu
|
||||
pacman -S git ansible
|
||||
pacman -S git ansible just
|
||||
cd $(mktemp -d)
|
||||
git clone https://gitea.exu.li/exu/configs.git
|
||||
cd configs
|
||||
ansible-playbook setup.yml
|
||||
just setup
|
||||
```
|
||||
|
||||
### Config updates
|
||||
|
||||
Either use the included alias
|
||||
|
||||
```sh
|
||||
upconf
|
||||
just config
|
||||
```
|
||||
|
||||
or run the script directly.
|
||||
### Package installation
|
||||
|
||||
``` sh
|
||||
~/scripts/arch-config.sh
|
||||
just packages
|
||||
```
|
||||
|
||||
## Other
|
||||
|
@ -1,71 +0,0 @@
|
||||
### MangoHud configuration file
|
||||
### Uncomment any options you wish to enable. Default options are left uncommented
|
||||
### Use some_parameter=0 to disable a parameter (only works with on/off parameters)
|
||||
### Everything below can be used / overridden with the environment variable MANGOHUD_CONFIG instead
|
||||
|
||||
################ PERFORMANCE #################
|
||||
|
||||
### Limit the application FPS
|
||||
# fps_limit=
|
||||
|
||||
### VSYNC [0-3] 0 = adaptive; 1 = off; 2 = mailbox; 3 = on
|
||||
# vsync=
|
||||
|
||||
################### VISUAL ###################
|
||||
|
||||
### Display the current CPU information
|
||||
cpu_stats
|
||||
cpu_temp
|
||||
|
||||
### Display the current GPU information
|
||||
gpu_name
|
||||
gpu_stats
|
||||
gpu_temp
|
||||
gpu_power
|
||||
|
||||
### Display the frametime line graph
|
||||
frame_timing
|
||||
|
||||
### Display the current system time
|
||||
# time
|
||||
|
||||
### Change the hud font size (default is 24)
|
||||
font_size=20
|
||||
|
||||
### Change the hud position (default is top-left)
|
||||
position=top-left
|
||||
|
||||
### Display the current CPU load & frequency for each core
|
||||
core_load
|
||||
|
||||
### Display system ram / vram usage
|
||||
ram
|
||||
vram
|
||||
|
||||
### Disable / hide the hud by deafult
|
||||
# no_display
|
||||
|
||||
### Hud position offset
|
||||
# offset_x=
|
||||
# offset_y=
|
||||
|
||||
### Hud dimensions
|
||||
# width=
|
||||
# height=
|
||||
|
||||
### Hud transparency / alpha
|
||||
background_alpha=0.5
|
||||
|
||||
### Crosshair overlay (default size is 30)
|
||||
# crosshair
|
||||
# crosshair_size=
|
||||
# crosshair_color=RRGGBB
|
||||
|
||||
### Output file
|
||||
output_file /home/marc/Dokumente/mangohud.log
|
||||
|
||||
################## INTERACTION #################
|
||||
|
||||
### Change toggle keybinds for the hud & logging
|
||||
toggle_hud=Shift_L+F12
|
||||
toggle_logging=Shift_L+F2
|
@ -1 +0,0 @@
|
||||
<mxfile host="Electron" modified="2022-09-16T06:39:11.723Z" agent="5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/20.3.0 Chrome/104.0.5112.114 Electron/20.1.3 Safari/537.36" etag="3qCqwMKNhbMENT5RahAg" version="20.3.0" type="device"><diagram id="sNatUqG7UvmBRNr3f2vi" name="Seite-1">dZFND8IgDIZ/DfcBRvE8vy6edvBMRh0kbF0YZtNf7xaYk0xPlOd9S2lLeF4PZydbfUUFlrBMDYQfCGN0Q8V4TOQZiBA0gMoZFU0LKMwLIswifRgFXWL0iNabNoUlNg2UPmHSOexT2x1tWrWVFaxAUUq7pjejvI6UbveLcAFT6VhasF0QajmbYyedlgr7L8SPhOcO0YeoHnKw0/DmuYS80x/18zEHjf+RMAbL2+Ml2RA/vgE=</diagram></mxfile>
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,3 +0,0 @@
|
||||
<link href="/home/marc/GitProjects/markdown-css/markdown.css" rel="stylesheet"></link>
|
||||
|
||||
# Document Title
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,8 +0,0 @@
|
||||
json:{
|
||||
"prefer_author_sort": false,
|
||||
"toc_title": null,
|
||||
"mobi_toc_at_start": false,
|
||||
"dont_compress": false,
|
||||
"no_inline_toc": false,
|
||||
"share_not_sync": false
|
||||
}
|
@ -1,17 +0,0 @@
|
||||
json:{
|
||||
"colors": 0,
|
||||
"dont_normalize": false,
|
||||
"keep_aspect_ratio": false,
|
||||
"right2left": false,
|
||||
"despeckle": false,
|
||||
"no_sort": false,
|
||||
"no_process": false,
|
||||
"landscape": false,
|
||||
"dont_sharpen": false,
|
||||
"disable_trim": false,
|
||||
"wide": false,
|
||||
"output_format": "png",
|
||||
"dont_grayscale": false,
|
||||
"comic_image_size": null,
|
||||
"dont_add_comic_pages_to_toc": false
|
||||
}
|
@ -1,5 +0,0 @@
|
||||
json:{
|
||||
"docx_no_cover": false,
|
||||
"docx_no_pagebreaks_between_notes": false,
|
||||
"docx_inline_subsup": false
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
json:{
|
||||
"docx_page_size": "letter",
|
||||
"docx_custom_page_size": null,
|
||||
"docx_no_cover": false,
|
||||
"docx_no_toc": false,
|
||||
"docx_page_margin_left": 72.0,
|
||||
"docx_page_margin_top": 72.0,
|
||||
"docx_page_margin_right": 72.0,
|
||||
"docx_page_margin_bottom": 72.0,
|
||||
"preserve_cover_aspect_ratio": false
|
||||
}
|
@ -1,12 +0,0 @@
|
||||
json:{
|
||||
"dont_split_on_page_breaks": false,
|
||||
"flow_size": 0,
|
||||
"no_default_epub_cover": false,
|
||||
"no_svg_cover": false,
|
||||
"epub_inline_toc": false,
|
||||
"epub_toc_at_end": false,
|
||||
"toc_title": null,
|
||||
"preserve_cover_aspect_ratio": true,
|
||||
"epub_flatten": false,
|
||||
"epub_version": "3"
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
json:{
|
||||
"no_inline_fb2_toc": false
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
json:{
|
||||
"sectionize": "files",
|
||||
"fb2_genre": "antique"
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
json:{
|
||||
"enable_heuristics": false,
|
||||
"markup_chapter_headings": true,
|
||||
"italicize_common_cases": true,
|
||||
"fix_indents": true,
|
||||
"html_unwrap_factor": 0.4,
|
||||
"unwrap_lines": true,
|
||||
"delete_blank_paragraphs": true,
|
||||
"format_scene_breaks": true,
|
||||
"replace_scene_breaks": "",
|
||||
"dehyphenate": true,
|
||||
"renumber_headings": true
|
||||
}
|
@ -1,5 +0,0 @@
|
||||
json:{
|
||||
"htmlz_css_type": "class",
|
||||
"htmlz_class_style": "external",
|
||||
"htmlz_title_filename": false
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
json:{
|
||||
"allow_conversion_with_errors": true
|
||||
}
|
@ -1,26 +0,0 @@
|
||||
json:{
|
||||
"change_justification": "original",
|
||||
"extra_css": null,
|
||||
"base_font_size": 0.0,
|
||||
"font_size_mapping": null,
|
||||
"line_height": 0.0,
|
||||
"minimum_line_height": 120.0,
|
||||
"embed_font_family": null,
|
||||
"embed_all_fonts": false,
|
||||
"subset_embedded_fonts": false,
|
||||
"smarten_punctuation": false,
|
||||
"unsmarten_punctuation": false,
|
||||
"disable_font_rescaling": false,
|
||||
"insert_blank_line": false,
|
||||
"remove_paragraph_spacing": false,
|
||||
"remove_paragraph_spacing_indent_size": 1.5,
|
||||
"insert_blank_line_size": 0.5,
|
||||
"input_encoding": null,
|
||||
"filter_css": "",
|
||||
"expand_css": false,
|
||||
"asciiize": false,
|
||||
"keep_ligatures": false,
|
||||
"linearize_tables": false,
|
||||
"transform_css_rules": "[]",
|
||||
"transform_html_rules": "[]"
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
json:{
|
||||
"wordspace": 2.5,
|
||||
"header": false,
|
||||
"header_format": "%t by %a",
|
||||
"minimum_indent": 0.0,
|
||||
"serif_family": null,
|
||||
"render_tables_as_images": false,
|
||||
"sans_family": null,
|
||||
"mono_family": null,
|
||||
"text_size_multiplier_for_rendered_tables": 1.0,
|
||||
"autorotation": false,
|
||||
"header_separation": 0.0
|
||||
}
|
@ -1,12 +0,0 @@
|
||||
json:{
|
||||
"prefer_author_sort": false,
|
||||
"toc_title": null,
|
||||
"mobi_keep_original_images": false,
|
||||
"mobi_ignore_margins": false,
|
||||
"mobi_toc_at_start": false,
|
||||
"dont_compress": false,
|
||||
"no_inline_toc": false,
|
||||
"share_not_sync": false,
|
||||
"personal_doc": "[PDOC]",
|
||||
"mobi_file_type": "old"
|
||||
}
|
@ -1,8 +0,0 @@
|
||||
json:{
|
||||
"margin_top": 5.0,
|
||||
"margin_left": 5.0,
|
||||
"margin_right": 5.0,
|
||||
"margin_bottom": 5.0,
|
||||
"input_profile": "default",
|
||||
"output_profile": "tablet"
|
||||
}
|
@ -1,5 +0,0 @@
|
||||
json:{
|
||||
"format": "doc",
|
||||
"inline_toc": false,
|
||||
"pdb_output_encoding": "cp1252"
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
json:{
|
||||
"no_images": false,
|
||||
"unwrap_factor": 0.45
|
||||
}
|
@ -1,26 +0,0 @@
|
||||
json:{
|
||||
"use_profile_size": false,
|
||||
"paper_size": "letter",
|
||||
"custom_size": null,
|
||||
"pdf_hyphenate": false,
|
||||
"preserve_cover_aspect_ratio": false,
|
||||
"pdf_serif_family": "Nimbus Roman",
|
||||
"unit": "inch",
|
||||
"pdf_sans_family": "Nimbus Sans [UKWN]",
|
||||
"pdf_mono_family": "Nimbus Mono PS",
|
||||
"pdf_standard_font": "serif",
|
||||
"pdf_default_font_size": 20,
|
||||
"pdf_mono_font_size": 16,
|
||||
"pdf_page_numbers": false,
|
||||
"pdf_footer_template": null,
|
||||
"pdf_header_template": null,
|
||||
"pdf_add_toc": false,
|
||||
"toc_title": null,
|
||||
"pdf_page_margin_left": 72.0,
|
||||
"pdf_page_margin_top": 72.0,
|
||||
"pdf_page_margin_right": 72.0,
|
||||
"pdf_page_margin_bottom": 72.0,
|
||||
"pdf_use_document_margins": false,
|
||||
"pdf_page_number_map": null,
|
||||
"pdf_odd_even_offset": 0.0
|
||||
}
|
@ -1,5 +0,0 @@
|
||||
json:{
|
||||
"inline_toc": false,
|
||||
"full_image_depth": false,
|
||||
"pml_output_encoding": "cp1252"
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
json:{
|
||||
"inline_toc": false
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
json:{
|
||||
"ignore_wmf": false
|
||||
}
|
@ -1,9 +0,0 @@
|
||||
json:{
|
||||
"search_replace": "[]",
|
||||
"sr1_search": null,
|
||||
"sr1_replace": null,
|
||||
"sr2_search": null,
|
||||
"sr2_replace": null,
|
||||
"sr3_search": null,
|
||||
"sr3_replace": null
|
||||
}
|
@ -1,6 +0,0 @@
|
||||
json:{
|
||||
"snb_insert_empty_line": false,
|
||||
"snb_dont_indent_first_line": false,
|
||||
"snb_hide_chapter_name": false,
|
||||
"snb_full_screen": false
|
||||
}
|
@ -1,9 +0,0 @@
|
||||
json:{
|
||||
"chapter": "//*[((name()='h1' or name()='h2') and re:test(., '\\s*((chapter|book|section|part)\\s+)|((prolog|prologue|epilogue)(\\s+|$))', 'i')) or @class = 'chapter']",
|
||||
"chapter_mark": "pagebreak",
|
||||
"start_reading_at": null,
|
||||
"remove_first_image": false,
|
||||
"remove_fake_margins": true,
|
||||
"insert_metadata": false,
|
||||
"page_breaks_before": "//*[name()='h1' or name()='h2']"
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
json:{
|
||||
"level1_toc": null,
|
||||
"level2_toc": null,
|
||||
"level3_toc": null,
|
||||
"toc_threshold": 6,
|
||||
"max_toc_links": 50,
|
||||
"no_chapters_in_toc": false,
|
||||
"use_auto_toc": false,
|
||||
"toc_filter": null,
|
||||
"duplicate_links_in_toc": false
|
||||
}
|
@ -1,7 +0,0 @@
|
||||
json:{
|
||||
"paragraph_type": "auto",
|
||||
"formatting_type": "auto",
|
||||
"markdown_extensions": "footnotes, tables, toc",
|
||||
"preserve_spaces": false,
|
||||
"txt_in_remove_indents": false
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
json:{
|
||||
"newline": "system",
|
||||
"max_line_length": 0,
|
||||
"force_max_line_length": false,
|
||||
"inline_toc": false,
|
||||
"txt_output_formatting": "plain",
|
||||
"keep_links": false,
|
||||
"keep_image_references": false,
|
||||
"keep_color": false,
|
||||
"txt_output_encoding": "utf-8"
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
json:{
|
||||
"newline": "system",
|
||||
"max_line_length": 0,
|
||||
"force_max_line_length": false,
|
||||
"inline_toc": false,
|
||||
"txt_output_formatting": "plain",
|
||||
"keep_links": false,
|
||||
"keep_image_references": false,
|
||||
"keep_color": false,
|
||||
"txt_output_encoding": "utf-8"
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
{
|
||||
"last_used_colors": "Grass",
|
||||
"last_used_style": "Banner"
|
||||
}
|
@ -1,20 +0,0 @@
|
||||
{
|
||||
"disabled_plugins": {
|
||||
"__class__": "set",
|
||||
"__value__": []
|
||||
},
|
||||
"enabled_plugins": {
|
||||
"__class__": "set",
|
||||
"__value__": [
|
||||
"DeDRM"
|
||||
]
|
||||
},
|
||||
"filetype_mapping": {},
|
||||
"plugin_customization": {},
|
||||
"plugins": {
|
||||
"DeACSM": "/home/marc/.config/calibre/plugins/DeACSM.zip",
|
||||
"DeDRM": "/home/marc/.config/calibre/plugins/DeDRM.zip",
|
||||
"KFX Input": "/home/exu/.config/calibre/plugins/KFX Input.zip",
|
||||
"Obok DeDRM": "/home/marc/.config/calibre/plugins/Obok DeDRM.zip"
|
||||
}
|
||||
}
|
@ -1,45 +0,0 @@
|
||||
{
|
||||
"DeDRMexport_Kindle_for_Mac_and_PC_Key_keys": "/home/marc/Downloads/default_key.k4i",
|
||||
"DeDRMimport_Adobe_Digital_Editions_Key_keys": "/home/marc/Nextcloud/backups",
|
||||
"Export ADE activation files": "/home/marc/Nextcloud/backups/adobe_account_backup_uuid_2d6cfbec-33fd-43ca-bcf9-e8b281114a17.zip",
|
||||
"Export ADE keys": "/home/marc/Nextcloud/backups/adobe_uuid_2d6cfbec-33fd-43ca-bcf9-e8b281114a17.der",
|
||||
"add a plugin dialog": "/home/marc/Downloads/DeDRM_tools_10.0.3",
|
||||
"add books dialog dir": "/home/exu/Nextcloud/Reading/books/Brandon Sanderson - The Stormlight Archive",
|
||||
"add books dialog dir-last-used-filter-spec-all-files": false,
|
||||
"choose calibre library": "/home/exu/.local/share/Calibre-Library",
|
||||
"database location dialog": "/home/marc/Nextcloud/Books",
|
||||
"library_delete_books_again": false,
|
||||
"notified-version-updates": {
|
||||
"__class__": "set",
|
||||
"__value__": [
|
||||
"6.0",
|
||||
"5.24",
|
||||
"5.28",
|
||||
"5.25",
|
||||
"7.19",
|
||||
"7.18"
|
||||
]
|
||||
},
|
||||
"recursive book import root dir dialog": "/home/exu/Nextcloud/Reading/books/Brandon Sanderson - The Stormlight Archive",
|
||||
"save to disk dialog": "/home/exu/Downloads/newBooks",
|
||||
"sort_history": [
|
||||
[
|
||||
"series",
|
||||
true
|
||||
],
|
||||
[
|
||||
"authors",
|
||||
true
|
||||
],
|
||||
[
|
||||
"series",
|
||||
true
|
||||
],
|
||||
[
|
||||
"timestamp",
|
||||
false
|
||||
]
|
||||
],
|
||||
"welcome_wizard_device": "default",
|
||||
"welcome_wizard_was_run": true
|
||||
}
|
@ -1,10 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>lineedit_history_tweak_book_find_edit</key>
|
||||
<array>
|
||||
<string>font</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
File diff suppressed because it is too large
Load Diff
@ -1,51 +0,0 @@
|
||||
{
|
||||
"add_formats_to_existing": false,
|
||||
"case_sensitive": false,
|
||||
"check_for_dupes_on_ctl": false,
|
||||
"database_path": "/home/marc/library1.db",
|
||||
"filename_pattern": "(?P<title>.+) - (?P<author>[^_]+)",
|
||||
"input_format_order": [
|
||||
"EPUB",
|
||||
"AZW3",
|
||||
"MOBI",
|
||||
"LIT",
|
||||
"PRC",
|
||||
"FB2",
|
||||
"HTML",
|
||||
"HTM",
|
||||
"XHTM",
|
||||
"SHTML",
|
||||
"XHTML",
|
||||
"ZIP",
|
||||
"DOCX",
|
||||
"ODT",
|
||||
"RTF",
|
||||
"PDF",
|
||||
"TXT"
|
||||
],
|
||||
"installation_uuid": "95258752-0a69-416a-90ff-c20df0267b24",
|
||||
"isbndb_com_key": "",
|
||||
"language": "de",
|
||||
"library_path": "/home/exu/.local/share/Calibre-Library",
|
||||
"limit_search_columns": false,
|
||||
"limit_search_columns_to": [
|
||||
"title",
|
||||
"authors",
|
||||
"tags",
|
||||
"series",
|
||||
"publisher"
|
||||
],
|
||||
"manage_device_metadata": "manual",
|
||||
"mark_new_books": false,
|
||||
"migrated": false,
|
||||
"network_timeout": 5,
|
||||
"new_book_tags": [],
|
||||
"numeric_collation": false,
|
||||
"output_format": "epub",
|
||||
"read_file_metadata": true,
|
||||
"saved_searches": {},
|
||||
"swap_author_names": false,
|
||||
"use_primary_find_in_search": true,
|
||||
"user_categories": {},
|
||||
"worker_process_priority": "normal"
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -1,81 +0,0 @@
|
||||
{
|
||||
"LRF_conversion_defaults": [],
|
||||
"LRF_ebook_viewer_options": null,
|
||||
"asked_library_thing_password": false,
|
||||
"auto_download_cover": false,
|
||||
"autolaunch_server": false,
|
||||
"column_map": [
|
||||
"title",
|
||||
"ondevice",
|
||||
"authors",
|
||||
"size",
|
||||
"timestamp",
|
||||
"rating",
|
||||
"publisher",
|
||||
"tags",
|
||||
"series",
|
||||
"pubdate"
|
||||
],
|
||||
"confirm_delete": false,
|
||||
"cover_flow_queue_length": 6,
|
||||
"default_send_to_device_action": "DeviceAction:main::False:False",
|
||||
"delete_news_from_library_on_upload": false,
|
||||
"disable_animations": false,
|
||||
"disable_tray_notification": false,
|
||||
"enforce_cpu_limit": true,
|
||||
"get_social_metadata": true,
|
||||
"gui_layout": "wide",
|
||||
"highlight_search_matches": false,
|
||||
"internally_viewed_formats": [
|
||||
"LRF",
|
||||
"EPUB",
|
||||
"LIT",
|
||||
"MOBI",
|
||||
"PRC",
|
||||
"POBI",
|
||||
"AZW",
|
||||
"AZW3",
|
||||
"HTML",
|
||||
"FB2",
|
||||
"PDB",
|
||||
"RB",
|
||||
"SNB",
|
||||
"HTMLZ",
|
||||
"KEPUB"
|
||||
],
|
||||
"jobs_search_history": [],
|
||||
"lrf_viewer_search_history": [],
|
||||
"main_search_history": [],
|
||||
"main_window_geometry": {
|
||||
"__class__": "bytearray",
|
||||
"__value__": "AdnQywADAAAAAAAAAAAAAAAAB38AAAQjAAAAAgAAAAIAAAd9AAAEIQAAAAAAAAAAB4AAAAACAAAAAgAAB30AAAQh"
|
||||
},
|
||||
"match_tags_type": "any",
|
||||
"new_version_notification": true,
|
||||
"oldest_news": 60,
|
||||
"overwrite_author_title_metadata": true,
|
||||
"plugin_search_history": [],
|
||||
"save_to_disk_template_history": [
|
||||
"{authors} - {series}/{series} {series_index} - {title}",
|
||||
"{author_sort}/{title}/{title} - {authors}"
|
||||
],
|
||||
"scheduler_search_history": [],
|
||||
"search_as_you_type": false,
|
||||
"send_to_device_template_history": [],
|
||||
"send_to_storage_card_by_default": false,
|
||||
"separate_cover_flow": false,
|
||||
"shortcuts_search_history": [],
|
||||
"show_avg_rating": true,
|
||||
"sort_tags_by": "name",
|
||||
"systray_icon": false,
|
||||
"tag_browser_hidden_categories": {
|
||||
"__class__": "set",
|
||||
"__value__": []
|
||||
},
|
||||
"tweaks_search_history": [],
|
||||
"upload_news_to_device": true,
|
||||
"use_roman_numerals_for_series_number": true,
|
||||
"viewer_search_history": [],
|
||||
"viewer_toc_search_history": [],
|
||||
"worker_limit": 6
|
||||
}
|
@ -1,10 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>lineedit_history_xpath_edit_opt_chapter</key>
|
||||
<array>
|
||||
<string>//*[((name()='h1' or name()='h2') and re:test(., '\s*((chapter|book|section|part)\s+)|((prolog|prologue|epilogue)(\s+|$))', 'i')) or @class = 'chapter']</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
File diff suppressed because one or more lines are too long
@ -1,7 +0,0 @@
|
||||
{
|
||||
"domain": "uk",
|
||||
"ignore_fields": [],
|
||||
"prefer_kindle_edition": true,
|
||||
"server": "auto",
|
||||
"use_mobi_asin": true
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
{
|
||||
"ignore_fields": [],
|
||||
"max_covers": 5
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
{
|
||||
"ignore_fields": []
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
{
|
||||
"ignore_fields": []
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
{
|
||||
"ignore_fields": []
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
{
|
||||
"cover_priorities": {
|
||||
"Big Book Search": 2,
|
||||
"Goodreads": 3,
|
||||
"Google": 2,
|
||||
"Google Images": 2
|
||||
},
|
||||
"fewer_tags": true,
|
||||
"ignore_fields": [],
|
||||
"max_tags": 10
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
{
|
||||
"blacklist": [
|
||||
"9d273cd5"
|
||||
],
|
||||
"history": {
|
||||
"9d273cd5": [
|
||||
"ONEPLUS A3003",
|
||||
"2021-05-15T18:17:07.571225+00:00"
|
||||
]
|
||||
}
|
||||
}
|
Binary file not shown.
@ -1,28 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<activationInfo xmlns="http://ns.adobe.com/adept">
|
||||
<adept:activationServiceInfo xmlns:adept="http://ns.adobe.com/adept">
|
||||
<adept:authURL>http://adeactivate.adobe.com/adept</adept:authURL>
|
||||
<adept:userInfoURL>http://adeactivate.adobe.com/adept</adept:userInfoURL>
|
||||
<adept:activationURL>http://adeactivate.adobe.com/adept</adept:activationURL>
|
||||
<adept:certificate>MIIEsjCCA5qgAwIBAgIER2q5eDANBgkqhkiG9w0BAQUFADCBhDELMAkGA1UEBhMCVVMxIzAhBgNVBAoTGkFkb2JlIFN5c3RlbXMgSW5jb3Jwb3JhdGVkMRswGQYDVQQLExJEaWdpdGFsIFB1Ymxpc2hpbmcxMzAxBgNVBAMTKkFkb2JlIENvbnRlbnQgU2VydmVyIENlcnRpZmljYXRlIEF1dGhvcml0eTAeFw0wODAxMDkxODM3NDVaFw0xMzAxMDkxOTA3NDVaMH0xCzAJBgNVBAYTAlVTMSMwIQYDVQQKExpBZG9iZSBTeXN0ZW1zIEluY29ycG9yYXRlZDEbMBkGA1UECxMSRGlnaXRhbCBQdWJsaXNoaW5nMSwwKgYDVQQDEyNodHRwOi8vYWRlYWN0aXZhdGUuYWRvYmUuY29tL2FkZXB0LzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAyXpCCWFh0Q3Bi1S7xf+CJfMd+cZz3HB0NknDScB1Cs8KdU0ygO7iqAgdiAdPliITkUTVEgUPvK+4yYCUderzBjq13/IrKlwEAyWeNgssJekpYgqNywo7Md1OApXzM47wVThNePNydhGYuNEEDDxzO+0JxucfhfArwnp7kIWA6q8CAwEAAaOCAbQwggGwMAsGA1UdDwQEAwIFoDBYBglghkgBhvprHgEESwxJVGhlIHByaXZhdGUga2V5IGNvcnJlc3BvbmRpbmcgdG8gdGhpcyBjZXJ0aWZpY2F0ZSBtYXkgaGF2ZSBiZWVuIGV4cG9ydGVkLjAUBgNVHSUEDTALBgkqhkiG9y8CAQQwgbIGA1UdIASBqjCBpzCBpAYJKoZIhvcvAQIDMIGWMIGTBggrBgEFBQcCAjCBhhqBg1lvdSBhcmUgbm90IHBlcm1pdHRlZCB0byB1c2UgdGhpcyBMaWNlbnNlIENlcnRpZmljYXRlIGV4Y2VwdCBhcyBwZXJtaXR0ZWQgYnkgdGhlIGxpY2Vuc2UgYWdyZWVtZW50IGFjY29tcGFueWluZyB0aGUgQWRvYmUgc29mdHdhcmUuMDEGA1UdHwQqMCgwJqAkoCKGIGh0dHA6Ly9jcmwuYWRvYmUuY29tL2Fkb2JlQ1MuY3JsMB8GA1UdIwQYMBaAFIvu8IFgyaLaHg5SwVgMBLBD94/oMB0GA1UdDgQWBBT9A+kXOPL6N57MN/zovbCGEx2+BTAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBBQUAA4IBAQBVjUalliql3VjpLdT8si7OwPU1wQODllwlgfLH7tI/Ubq5wHDlprGtbf3jZm6tXY1qmh9mz1WnTmQHU3uPk8qgpihrpx4HJTjhAhLP0CXU1rd/t5whwhgT1lYfw77RRG2lZ5BzpHb/XjnY5yc3awd6F3Dli6kTkbcPyOCNoXlW4wiF+jkL+jBImY8xo2EewiJioY/iTYZH5HF+PjHF5mffANiLK/Q43l4f0YF8UagTfAJkD3iQV9lrTOWxKBgpfdyvekGqFCDq9AKzfpllqctxsC29W5bXU0cVYzf6Bj5ALs6tyi7r5fsIPSwszH/i4ixsuD0qccIgTXCwMNbt9zQu</adept:certificate>
|
||||
<adept:authenticationCertificate>MIIEYDCCA0igAwIBAgIER2q5eTANBgkqhkiG9w0BAQUFADCBhDELMAkGA1UEBhMCVVMxIzAhBgNVBAoTGkFkb2JlIFN5c3RlbXMgSW5jb3Jwb3JhdGVkMRswGQYDVQQLExJEaWdpdGFsIFB1Ymxpc2hpbmcxMzAxBgNVBAMTKkFkb2JlIENvbnRlbnQgU2VydmVyIENlcnRpZmljYXRlIEF1dGhvcml0eTAeFw0wODAxMDkxODQzNDNaFw0xODAxMzEwODAwMDBaMHwxKzApBgNVBAMTImh0dHA6Ly9hZGVhY3RpdmF0ZS5hZG9iZS5jb20vYWRlcHQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxCzAJBgNVBAYTAlVTMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDZAxpzOZ7N38ZGlQjfMY/lfu4Ta4xK3FRm069VwdqGZIwrfTTRxnLE4A9i1X00BnNk/5z7C0pQX435ylIEQPxIFBKTH+ip5rfDNh/Iu6cIlB0N4I/t7Pac8cIDwbc9HxcGTvXg3BFqPjaGVbmVZmoUtSVOsphdA43sZc6j1iFfOQIDAQABo4IBYzCCAV8wEgYDVR0TAQH/BAgwBgEB/wIBATAUBgNVHSUEDTALBgkqhkiG9y8CAQUwgbIGA1UdIASBqjCBpzCBpAYJKoZIhvcvAQIDMIGWMIGTBggrBgEFBQcCAjCBhhqBg1lvdSBhcmUgbm90IHBlcm1pdHRlZCB0byB1c2UgdGhpcyBMaWNlbnNlIENlcnRpZmljYXRlIGV4Y2VwdCBhcyBwZXJtaXR0ZWQgYnkgdGhlIGxpY2Vuc2UgYWdyZWVtZW50IGFjY29tcGFueWluZyB0aGUgQWRvYmUgc29mdHdhcmUuMDEGA1UdHwQqMCgwJqAkoCKGIGh0dHA6Ly9jcmwuYWRvYmUuY29tL2Fkb2JlQ1MuY3JsMAsGA1UdDwQEAwIBBjAfBgNVHSMEGDAWgBSL7vCBYMmi2h4OUsFYDASwQ/eP6DAdBgNVHQ4EFgQU9RP19K+lzF03he+0T47hCVkPhdAwDQYJKoZIhvcNAQEFBQADggEBAJoqOj+bUa+bDYyOSljs6SVzWH2BN2ylIeZKpTQYEo7jA62tRqW/rBZcNIgCudFvEYa7vH8lHhvQak1s95g+NaNidb5tpgbS8Q7/XTyEGS/4Q2HYWHD/8ydKFROGbMhfxpdJgkgn21mb7dbsfq5AZVGS3M4PP1xrMDYm50+Sip9QIm1RJuSaKivDa/piA5p8/cv6w44YBefLzGUN674Y7WS5u656MjdyJsN/7Oup+12fHGiye5QS5mToujGd6LpU80gfhNxhrphASiEBYQ/BUhWjHkSi0j4WOiGvGpT1Xvntcj0rf6XV6lNrOddOYUL+KdC1uDIe8PUI+naKI+nWgrs=</adept:authenticationCertificate>
|
||||
</adept:activationServiceInfo>
|
||||
|
||||
<adept:credentials xmlns:adept="http://ns.adobe.com/adept">
|
||||
<adept:user>urn:uuid:2d6cfbec-33fd-43ca-bcf9-e8b281114a17</adept:user>
|
||||
<adept:username method="AdobeID">adobe@frm01.net</adept:username>
|
||||
<adept:pkcs12>MIIICgIBAzCCB8MGCSqGSIb3DQEHAaCCB7QEggewMIIHrDCCA3AGCSqGSIb3DQEHAaCCA2EEggNdMIIDWTCCA1UGCyqGSIb3DQEMCgECoIICszCCAq8wKQYKKoZIhvcNAQwBAzAbBBQYnc0dHYbt/zeyPvEbYrhgbYyxgwIDAMNQBIICgFqwh/cekpmgYtd57bU3rUEJEohVOC1OrXVh3j9b8UE7RHwiI04O9D0TZtZv0y6IH4VotY/t0j71JAHpXQwVnyQgSB0zrqi7inwJ8p/xFCPvrS/brzZk4hGmSRMfaeyoaqZhTYHThAFw3Hyz7FqmqU5p8bfggSwQ/xOviU7Ct+nBAfkrpaiNfeTQdY63lMKHghYL4IwmZMs6omaVN0ngMuN4/Nhfp7Ij0FK8SmpMMsRJCXSAAk6VjxNBKlLHwgkA4C+qxdyK7LXLd5JO3lGWpR2x+mBHalYt07xaq6OuuNWQKv0Ho1o75Rv3Blibnj8dvcweb3/3aEP0G3p+BfU2bFKrb3pAn3ClsPO1JJnncYdOLmNdkUlmbHK2RIAgAIqE/aJy1QblRcN34drbbV7FMEHScMdaUf6HVTvEj2TkZRT56GxpI4nhIE9KVzzUrj03COLYkRsfkp1NhfBJC1W324Q9Qd4veQWgpAGzYrpN9KwfQsMvNAEijuV0ExYcDxhcp+8cPcCcyjvm8DICAHKCnHtyDHFTutXR6fMQ8Jphu7uz4Sm++YDXby5M+Kb4luK5u8+PDlqfJ5LGAJ54MlzqGUK0OK/NN9U76ga5ekpFeU5wq2DTCQIZ/M4QuHcaTXpme8YempZzcnFWLh5HD/HDQOddCPoxvinGcjiPxC7MWOMKvWPu/oETHNUWYvCz12O8EycWIi4a0dW6Sa0DlC2S0wBBP2lgvd41/M/CksFuqUjiww9CsBMwzbFlmv0ebi9ZKD7IHVz68s+DV3swIGVq7EKSlCGrHNbthnAeOIlk1lwRUTn09a8cRDo8tomY6cBxxdlyqN24mq0vFhwTfqO2CBgxgY4waQYJKoZIhvcNAQkUMVweWgB1AHIAbgA6AHUAdQBpAGQAOgAyAGQANgBjAGYAYgBlAGMALQAzADMAZgBkAC0ANAAzAGMAYQAtAGIAYwBmADkALQBlADgAYgAyADgAMQAxADEANABhADEANzAhBgkqhkiG9w0BCRUxFAQSVGltZSAxNjU4ODU5ODkzNjE4MIIENAYJKoZIhvcNAQcGoIIEJTCCBCECAQAwggQaBgkqhkiG9w0BBwEwKQYKKoZIhvcNAQwBBjAbBBS6J/ZgKvOxIDg+c1iftQoiDAEjXQIDAMNQgIID4Hvb0y7+KcU6CEWDRPRqY6v9CGWmZxT2Ih2P0azuqio0N3tOb+fl4vymhvcJzdGAG1wTX9c2BpZBM7hMVq1YZuM1/rpdXSnbizBYa+ZzcbbYoNU77yQujwQtcJG1JwZI+VCJMJ+rXCKP+0ebIuCmh7AvcfN2tj48h3TFIeRbeic+YjvqB7RifbnaoVkV56L4k2TeKy1yrTqE8K5I7lICEVJA/ouXWOHdpMcse4SGRZ6n3JmZnC6aCoZTwKydQ0HGW7EvkmK+b3DS8gf3SGLvDEz3k3H41oYvr8zAN40ID7a71wdKHIlDcSeLkzZt6YNZBMXeH8ZlNf3s2qiCX79lRHuKlH7GYjNoUoLlAKO1OSwluxr6qKsLizOrmj7pgledFGYW5iCxVUooXneou/fDJtpOZGVKugPmwT5hL4ouiDI7R7gzWaGDjK0EIorthBTOymwj+hMx5wJ+2g42a0UyD0n73G5e1zRE7hT5axOXULsYMiKTJMCEopRVV70fjAsInBmDOCjIzRsPKtVIrVpOVJIr0zULLqyPucnaeJHpr9sb9hojUqRRZy55wnb5L7JbNLWnRTGLApnmhTx5KmbBTo7UdpyhWUqsApPrhRf3pMXPPBzkEZgeSEp2un3lnPMNhKVpo9
lH2Ox206UXlzCYtrK+i+bH1wJdgdoDXzF01ysad8hHAcZJSUNF/i09DaoaFkX3uuPRbiqt5hgYdXb3E3aA5E/ChA80jFjaMLRBbsXSwvn7Ok9LD+kEUOoFvMyz9HrTS3j7fKQRvu20fn3uzyIkUyva46WlNWP/3KGcq1fWarJAuhYgwZbw3o9LB09w87uISN71Wm5pnYpAiFUo48oiTO/PB6F9F8SPiqR8h40Ghjvp7dFm9HzyXjq14T0GBkyG8RaJn9umc4yz1vOt7wPCXyf92cDltlSstpoY+SKrECTMcmnY//b9fyjRiXU4KJ7idSeKmF6thz1SBZ7FzUSoeozPHs+WOmIrVRJDez7J766oJBstqycIq15UVAPGt09ND/WXmhxDyHfZPlrR49YW87CLTFY8MmiOgd/fr+1gTsZqxLLb7L303MKeOQ8Lb0s4bMqGuCLsW/WcniHFhezN7YwyyAsdFhrI/Ugro2uDFlZ3BgAsipaqyzc4kyfGxjkSuIarzxV7huswH8COHSd0Jf9wj8iCLQSw4gX627C9F9dNy0AqT8uwXa/Z78leKUfSbbUB06Veram5y0kXnWGM97WyYKezxpmUHXUagZb5v6zG3o62/ia43tr8YmmUsJ9eyrl3xA+7RkERta/vryBuse5wQhpjd8F/2IxHnSG0MD4wITAJBgUrDgMCGgUABBSD2oMtYGOzFgykxeJda8+qoJQ7BAQUdwkJW7jL0vGmUxZNCAB2k3wM828CAwGGoA==</adept:pkcs12>
|
||||
<adept:licenseCertificate>MIIDGDCCAoGgAwIBAgIGAYI7wTNyMA0GCSqGSIb3DQEBBQUAMHwxKzApBgNVBAMTImh0dHA6Ly9hZGVhY3RpdmF0ZS5hZG9iZS5jb20vYWRlcHQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxCzAJBgNVBAYTAlVTMB4XDTIyMDcyNjE4MjQ1M1oXDTMyMDcyNjE4MjQ1M1owODE2MDQGA1UEAxMtdXJuOnV1aWQ6MmQ2Y2ZiZWMtMzNmZC00M2NhLWJjZjktZThiMjgxMTE0YTE3MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDf+bPePv4GMbGBRssk/3dXktw0nIou8bhWCXha1i+rRi3VpuhHkj02KgBjOaEWEZy3Xkjkv7JGedv2cT0ZShIC6wOvGTsKT2Y3IJpNgWoZzniaWPz1zAGKffe41vjhrVj+8Vbtmt0MhxUbM3uA47echS7Kg9Cp036ydHYX70EeVQIDAQABo4HoMIHlMIG0BgNVHSMEgawwgamAFPUT9fSvpcxdN4XvtE+O4QlZD4XQoYGKpIGHMIGEMQswCQYDVQQGEwJVUzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzEzMDEGA1UEAxMqQWRvYmUgQ29udGVudCBTZXJ2ZXIgQ2VydGlmaWNhdGUgQXV0aG9yaXR5ggRHarl5MAkGA1UdEwQCMAAwFAYDVR0lBA0wCwYJKoZIhvcvAgEHMAsGA1UdDwQEAwIFIDANBgkqhkiG9w0BAQUFAAOBgQC0lvq2fT3XCu8dB4au2kdQQvMPSVPLet9cS5bF5YSmox4YhLjF8BzBojNinU7fmxAnr5DL4Z4JL2OSf70SL6BOZAZUP8LTf2m3ES5096GGLvGryqNmHIeyublHhAa4sp7ya51NpkAi/Cj765WxORTMY+sF9D92R23Jj+Y8QslG1A==</adept:licenseCertificate>
|
||||
<adept:privateLicenseKey>MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAN/5s94+/gYxsYFGyyT/d1eS3DScii7xuFYJeFrWL6tGLdWm6EeSPTYqAGM5oRYRnLdeSOS/skZ52/ZxPRlKEgLrA68ZOwpPZjcgmk2BahnOeJpY/PXMAYp997jW+OGtWP7xVu2a3QyHFRsze4Djt5yFLsqD0KnTfrJ0dhfvQR5VAgMBAAECgYBfw07xhnNsSJEBmjg/YG8xZVx7rjay7a0INFJeXFfTXlU4lX2ZJGDBqOGziy9h1TPxfwGhtIjP80hmLXKXPoFGKyTRhf1Z2QsLefX1hhpHhuWI6NxEtQiUiN4oD+igvIWQnPYkRJtth14hvOkl9wtQM6zFG1IV+8hkZf6gJ4c8gQJBAPq3K/UfSjHz1YmIo86wGU8bZHnsdo2uOX0biH3cQ20WsLv2cj6wo/DmFgVAE8hbYkW2yfrfN/ddL1skXTOHnSECQQDksj6mcZyzROW+DGC6csNEMuKVez7/DlWak4M4XwWa8wpQZPAqilNPjmrdK13Bsmxp8TrQDAJt4h/16GrWaEa1AkEAjdgQAJCBU52WVEeAFbG/v+fJgslrkWDemY94O2zgoNlTiCQ4IouhVOt3zeSgzJwXD0YJI+wiJ8sKvc/nAv5YwQJBAJVqp2gTnm+5ueh7Kc9nH5C1Nji3tybo9KDzc64m1wCvfbOc3xTMHzZBNCygIrdknVRyWRyIXCXysTL20KaYpmkCQHNYn681QtlOYC1AyMFcn/w78DmQwTDqlKIyx9oyaRJlEcq6KSeBgu1LJ0pGYq/5EGMYrp0KqMn/qXQ/1OSTY9M=</adept:privateLicenseKey>
|
||||
<adept:authenticationCertificate>MIIEYDCCA0igAwIBAgIER2q5eTANBgkqhkiG9w0BAQUFADCBhDELMAkGA1UEBhMCVVMxIzAhBgNVBAoTGkFkb2JlIFN5c3RlbXMgSW5jb3Jwb3JhdGVkMRswGQYDVQQLExJEaWdpdGFsIFB1Ymxpc2hpbmcxMzAxBgNVBAMTKkFkb2JlIENvbnRlbnQgU2VydmVyIENlcnRpZmljYXRlIEF1dGhvcml0eTAeFw0wODAxMDkxODQzNDNaFw0xODAxMzEwODAwMDBaMHwxKzApBgNVBAMTImh0dHA6Ly9hZGVhY3RpdmF0ZS5hZG9iZS5jb20vYWRlcHQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxCzAJBgNVBAYTAlVTMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDZAxpzOZ7N38ZGlQjfMY/lfu4Ta4xK3FRm069VwdqGZIwrfTTRxnLE4A9i1X00BnNk/5z7C0pQX435ylIEQPxIFBKTH+ip5rfDNh/Iu6cIlB0N4I/t7Pac8cIDwbc9HxcGTvXg3BFqPjaGVbmVZmoUtSVOsphdA43sZc6j1iFfOQIDAQABo4IBYzCCAV8wEgYDVR0TAQH/BAgwBgEB/wIBATAUBgNVHSUEDTALBgkqhkiG9y8CAQUwgbIGA1UdIASBqjCBpzCBpAYJKoZIhvcvAQIDMIGWMIGTBggrBgEFBQcCAjCBhhqBg1lvdSBhcmUgbm90IHBlcm1pdHRlZCB0byB1c2UgdGhpcyBMaWNlbnNlIENlcnRpZmljYXRlIGV4Y2VwdCBhcyBwZXJtaXR0ZWQgYnkgdGhlIGxpY2Vuc2UgYWdyZWVtZW50IGFjY29tcGFueWluZyB0aGUgQWRvYmUgc29mdHdhcmUuMDEGA1UdHwQqMCgwJqAkoCKGIGh0dHA6Ly9jcmwuYWRvYmUuY29tL2Fkb2JlQ1MuY3JsMAsGA1UdDwQEAwIBBjAfBgNVHSMEGDAWgBSL7vCBYMmi2h4OUsFYDASwQ/eP6DAdBgNVHQ4EFgQU9RP19K+lzF03he+0T47hCVkPhdAwDQYJKoZIhvcNAQEFBQADggEBAJoqOj+bUa+bDYyOSljs6SVzWH2BN2ylIeZKpTQYEo7jA62tRqW/rBZcNIgCudFvEYa7vH8lHhvQak1s95g+NaNidb5tpgbS8Q7/XTyEGS/4Q2HYWHD/8ydKFROGbMhfxpdJgkgn21mb7dbsfq5AZVGS3M4PP1xrMDYm50+Sip9QIm1RJuSaKivDa/piA5p8/cv6w44YBefLzGUN674Y7WS5u656MjdyJsN/7Oup+12fHGiye5QS5mToujGd6LpU80gfhNxhrphASiEBYQ/BUhWjHkSi0j4WOiGvGpT1Xvntcj0rf6XV6lNrOddOYUL+KdC1uDIe8PUI+naKI+nWgrs=</adept:authenticationCertificate>
|
||||
</adept:credentials>
|
||||
|
||||
<activationToken xmlns="http://ns.adobe.com/adept">
|
||||
<device>urn:uuid:32095968-696a-46d1-95ef-e76097c33051</device>
|
||||
<fingerprint>Pa7vI/H67wVERB/TsVjesFE6Kws=</fingerprint>
|
||||
<deviceType>standalone</deviceType>
|
||||
<activationURL>http://adeactivate.adobe.com/adept</activationURL>
|
||||
<user>urn:uuid:2d6cfbec-33fd-43ca-bcf9-e8b281114a17</user>
|
||||
<signature>B+Y6HxQ3o203HDb/5rSnal6Ca9tE8FEmVyiFcVpE9R7QzHo2NbpFzFHssFd2L+C7HKdFQ4pg+SFxyBLrDpLEdzILfPu+gRsDOvk/AGSisXEvdHsTFK9Yc5Cjkz8WkmWM1N6rgJ30V8AW6/d0mHj81g+Iue8VO8soBPkFwXGX1u4=</signature>
|
||||
</activationToken>
|
||||
<adept:operatorURLList xmlns:adept="http://ns.adobe.com/adept"><adept:user>urn:uuid:2d6cfbec-33fd-43ca-bcf9-e8b281114a17</adept:user><adept:operatorURL>https://acs4.kobo.com/fulfillment/Fulfill</adept:operatorURL></adept:operatorURLList><adept:licenseServices xmlns:adept="http://ns.adobe.com/adept"><adept:licenseServiceInfo><adept:licenseURL>https://nasigningservice.adobe.com/licensesign</adept:licenseURL><adept:certificate>MIIEvjCCA6agAwIBAgIER2q5ljANBgkqhkiG9w0BAQUFADCBhDELMAkGA1UEBhMCVVMxIzAhBgNVBAoTGkFkb2JlIFN5c3RlbXMgSW5jb3Jwb3JhdGVkMRswGQYDVQQLExJEaWdpdGFsIFB1Ymxpc2hpbmcxMzAxBgNVBAMTKkFkb2JlIENvbnRlbnQgU2VydmVyIENlcnRpZmljYXRlIEF1dGhvcml0eTAeFw0wODA4MTExNjMzNDhaFw0xMzA4MTEwNzAwMDBaMIGIMQswCQYDVQQGEwJVUzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzE3MDUGA1UEAxMuaHR0cHM6Ly9uYXNpZ25pbmdzZXJ2aWNlLmFkb2JlLmNvbS9saWNlbnNlc2lnbjCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAs9GRZ1f5UTRySgZ2xAL7TaDKQBfdpIS9ei9Orica0N72BB/WE+82G5lfsZ2HdeCFDZG/oz2WPLXovcuUAbFKSIXVLyc7ONOd4sczeXQYPixeAvqzGtsyMArIzaeJcriGVPRnbD/spbuHR0BHhJEakIiDtQLJz+xgVYHlicx2H/kCAwEAAaOCAbQwggGwMAsGA1UdDwQEAwIFoDBYBglghkgBhvprHgEESwxJVGhlIHByaXZhdGUga2V5IGNvcnJlc3BvbmRpbmcgdG8gdGhpcyBjZXJ0aWZpY2F0ZSBtYXkgaGF2ZSBiZWVuIGV4cG9ydGVkLjAUBgNVHSUEDTALBgkqhkiG9y8CAQIwgbIGA1UdIASBqjCBpzCBpAYJKoZIhvcvAQIDMIGWMIGTBggrBgEFBQcCAjCBhhqBg1lvdSBhcmUgbm90IHBlcm1pdHRlZCB0byB1c2UgdGhpcyBMaWNlbnNlIENlcnRpZmljYXRlIGV4Y2VwdCBhcyBwZXJtaXR0ZWQgYnkgdGhlIGxpY2Vuc2UgYWdyZWVtZW50IGFjY29tcGFueWluZyB0aGUgQWRvYmUgc29mdHdhcmUuMDEGA1UdHwQqMCgwJqAkoCKGIGh0dHA6Ly9jcmwuYWRvYmUuY29tL2Fkb2JlQ1MuY3JsMB8GA1UdIwQYMBaAFIvu8IFgyaLaHg5SwVgMBLBD94/oMB0GA1UdDgQWBBSQ5K+bvggI6Rbh2u9nPhH8bcYTITAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBBQUAA4IBAQC0l1L+BRCccZdb2d9zQBJ7JHkXWt1x/dUydU9I/na+QPFE5x+fGK4cRwaIfp6fNviGyvtJ6Wnxe6du/wlarC1o26UNpyWpnAltcy47LpVXsmcV5rUlhBx10l4lecuX0nx8/xF8joRz2BvvAusK+kxgKeiAjJg2W20wbJKh0Otct1ZihruQsEtGbZJ1L55xfNhrm6CKAHuGuTDYQ/S6W20dUaDUiNFhA2n2eEySLwUwgOuuhfVUPb8amQQKbF4rOQ2rdjAskEl/0CiavW6Xv0LGihThf6CjEbNS
dy+vXQ7K9wFbKsE843DflpuSPfj2Aagtyrv/j1HsBjsf03e0uVu5</adept:certificate></adept:licenseServiceInfo></adept:licenseServices></activationInfo>
|
@ -1,11 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<adept:deviceInfo xmlns:adept="http://ns.adobe.com/adept">
|
||||
<adept:deviceType>standalone</adept:deviceType>
|
||||
<adept:deviceClass>Desktop</adept:deviceClass>
|
||||
<adept:deviceSerial>84abdfab8a0837c803a405f01b2fe493ae7b8c10</adept:deviceSerial>
|
||||
<adept:deviceName>lupusregina</adept:deviceName>
|
||||
<adept:version name="hobbes" value="9.3.58046"/>
|
||||
<adept:version name="clientOS" value="Windows Vista"/>
|
||||
<adept:version name="clientLocale" value="de"/>
|
||||
<adept:fingerprint>Pa7vI/H67wVERB/TsVjesFE6Kws=</adept:fingerprint>
|
||||
</adept:deviceInfo>
|
@ -1 +0,0 @@
|
||||
<EFBFBD>6(8[hK4<4B><03><>w,<2C>
|
@ -1,13 +0,0 @@
|
||||
.tox/
|
||||
__pycache__/
|
||||
build/
|
||||
dist/
|
||||
tests/output/
|
||||
tmp/
|
||||
*.egg-info/
|
||||
*.pyc
|
||||
*.pyo
|
||||
.python-version
|
||||
.DS_Store
|
||||
.coverage
|
||||
coverage.xml
|
@ -1,19 +0,0 @@
|
||||
Copyright (c) 2015-2022 Will Bond <will@wbond.net>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
@ -1,305 +0,0 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: asn1crypto
|
||||
Version: 1.5.1
|
||||
Summary: Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP
|
||||
Home-page: https://github.com/wbond/asn1crypto
|
||||
Author: wbond
|
||||
Author-email: will@wbond.net
|
||||
License: MIT
|
||||
Description: # asn1crypto
|
||||
|
||||
A fast, pure Python library for parsing and serializing ASN.1 structures.
|
||||
|
||||
- [Features](#features)
|
||||
- [Why Another Python ASN.1 Library?](#why-another-python-asn1-library)
|
||||
- [Related Crypto Libraries](#related-crypto-libraries)
|
||||
- [Current Release](#current-release)
|
||||
- [Dependencies](#dependencies)
|
||||
- [Installation](#installation)
|
||||
- [License](#license)
|
||||
- [Security Policy](#security-policy)
|
||||
- [Documentation](#documentation)
|
||||
- [Continuous Integration](#continuous-integration)
|
||||
- [Testing](#testing)
|
||||
- [Development](#development)
|
||||
- [CI Tasks](#ci-tasks)
|
||||
|
||||
[![GitHub Actions CI](https://github.com/wbond/asn1crypto/workflows/CI/badge.svg)](https://github.com/wbond/asn1crypto/actions?workflow=CI)
|
||||
[![CircleCI](https://circleci.com/gh/wbond/asn1crypto.svg?style=shield)](https://circleci.com/gh/wbond/asn1crypto)
|
||||
[![PyPI](https://img.shields.io/pypi/v/asn1crypto.svg)](https://pypi.org/project/asn1crypto/)
|
||||
|
||||
## Features
|
||||
|
||||
In addition to an ASN.1 BER/DER decoder and DER serializer, the project includes
|
||||
a bunch of ASN.1 structures for use with various common cryptography standards:
|
||||
|
||||
| Standard | Module | Source |
|
||||
| ---------------------- | ------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- |
|
||||
| X.509 | [`asn1crypto.x509`](asn1crypto/x509.py) | [RFC 5280](https://tools.ietf.org/html/rfc5280) |
|
||||
| CRL | [`asn1crypto.crl`](asn1crypto/crl.py) | [RFC 5280](https://tools.ietf.org/html/rfc5280) |
|
||||
| CSR | [`asn1crypto.csr`](asn1crypto/csr.py) | [RFC 2986](https://tools.ietf.org/html/rfc2986), [RFC 2985](https://tools.ietf.org/html/rfc2985) |
|
||||
| OCSP | [`asn1crypto.ocsp`](asn1crypto/ocsp.py) | [RFC 6960](https://tools.ietf.org/html/rfc6960) |
|
||||
| PKCS#12 | [`asn1crypto.pkcs12`](asn1crypto/pkcs12.py) | [RFC 7292](https://tools.ietf.org/html/rfc7292) |
|
||||
| PKCS#8 | [`asn1crypto.keys`](asn1crypto/keys.py) | [RFC 5208](https://tools.ietf.org/html/rfc5208) |
|
||||
| PKCS#1 v2.1 (RSA keys) | [`asn1crypto.keys`](asn1crypto/keys.py) | [RFC 3447](https://tools.ietf.org/html/rfc3447) |
|
||||
| DSA keys | [`asn1crypto.keys`](asn1crypto/keys.py) | [RFC 3279](https://tools.ietf.org/html/rfc3279) |
|
||||
| Elliptic curve keys | [`asn1crypto.keys`](asn1crypto/keys.py) | [SECG SEC1 V2](http://www.secg.org/sec1-v2.pdf) |
|
||||
| PKCS#3 v1.4 | [`asn1crypto.algos`](asn1crypto/algos.py) | [PKCS#3 v1.4](ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-3.asc) |
|
||||
| PKCS#5 v2.1 | [`asn1crypto.algos`](asn1crypto/algos.py) | [PKCS#5 v2.1](http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf) |
|
||||
| CMS (and PKCS#7) | [`asn1crypto.cms`](asn1crypto/cms.py) | [RFC 5652](https://tools.ietf.org/html/rfc5652), [RFC 2315](https://tools.ietf.org/html/rfc2315) |
|
||||
| TSP | [`asn1crypto.tsp`](asn1crypto/tsp.py) | [RFC 3161](https://tools.ietf.org/html/rfc3161) |
|
||||
| PDF signatures | [`asn1crypto.pdf`](asn1crypto/pdf.py) | [PDF 1.7](http://wwwimages.adobe.com/content/dam/Adobe/en/devnet/pdf/pdfs/PDF32000_2008.pdf) |
|
||||
|
||||
## Why Another Python ASN.1 Library?
|
||||
|
||||
Python has long had the [pyasn1](https://pypi.org/project/pyasn1/) and
|
||||
[pyasn1_modules](https://pypi.org/project/pyasn1-modules/) available for
|
||||
parsing and serializing ASN.1 structures. While the project does include a
|
||||
comprehensive set of tools for parsing and serializing, the performance of the
|
||||
library can be very poor, especially when dealing with bit fields and parsing
|
||||
large structures such as CRLs.
|
||||
|
||||
After spending extensive time using *pyasn1*, the following issues were
|
||||
identified:
|
||||
|
||||
1. Poor performance
|
||||
2. Verbose, non-pythonic API
|
||||
3. Out-dated and incomplete definitions in *pyasn1-modules*
|
||||
4. No simple way to map data to native Python data structures
|
||||
5. No mechanism for overridden universal ASN.1 types
|
||||
|
||||
The *pyasn1* API is largely method driven, and uses extensive configuration
|
||||
objects and lowerCamelCase names. There were no consistent options for
|
||||
converting types of native Python data structures. Since the project supports
|
||||
out-dated versions of Python, many newer language features are unavailable
|
||||
for use.
|
||||
|
||||
Time was spent trying to profile issues with the performance, however the
|
||||
architecture made it hard to pin down the primary source of the poor
|
||||
performance. Attempts were made to improve performance by utilizing unreleased
|
||||
patches and delaying parsing using the `Any` type. Even with such changes, the
|
||||
performance was still unacceptably slow.
|
||||
|
||||
Finally, a number of structures in the cryptographic space use universal data
|
||||
types such as `BitString` and `OctetString`, but interpret the data as other
|
||||
types. For instance, signatures are really byte strings, but are encoded as
|
||||
`BitString`. Elliptic curve keys use both `BitString` and `OctetString` to
|
||||
represent integers. Parsing these structures as the base universal types and
|
||||
then re-interpreting them wastes computation.
|
||||
|
||||
*asn1crypto* uses the following techniques to improve performance, especially
|
||||
when extracting one or two fields from large, complex structures:
|
||||
|
||||
- Delayed parsing of byte string values
|
||||
- Persistence of original ASN.1 encoded data until a value is changed
|
||||
- Lazy loading of child fields
|
||||
- Utilization of high-level Python stdlib modules
|
||||
|
||||
While there is no extensive performance test suite, the
|
||||
`CRLTests.test_parse_crl` test case was used to parse a 21MB CRL file on a
|
||||
late 2013 rMBP. *asn1crypto* parsed the certificate serial numbers in just
|
||||
under 8 seconds. With *pyasn1*, using definitions from *pyasn1-modules*, the
|
||||
same parsing took over 4,100 seconds.
|
||||
|
||||
For smaller structures the performance difference can range from a few times
|
||||
faster to an order of magnitude or more.
|
||||
|
||||
## Related Crypto Libraries
|
||||
|
||||
*asn1crypto* is part of the modularcrypto family of Python packages:
|
||||
|
||||
- [asn1crypto](https://github.com/wbond/asn1crypto)
|
||||
- [oscrypto](https://github.com/wbond/oscrypto)
|
||||
- [csrbuilder](https://github.com/wbond/csrbuilder)
|
||||
- [certbuilder](https://github.com/wbond/certbuilder)
|
||||
- [crlbuilder](https://github.com/wbond/crlbuilder)
|
||||
- [ocspbuilder](https://github.com/wbond/ocspbuilder)
|
||||
- [certvalidator](https://github.com/wbond/certvalidator)
|
||||
|
||||
## Current Release
|
||||
|
||||
1.5.0 - [changelog](changelog.md)
|
||||
|
||||
## Dependencies
|
||||
|
||||
Python 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10 or pypy. *No third-party
|
||||
packages required.*
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
pip install asn1crypto
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
*asn1crypto* is licensed under the terms of the MIT license. See the
|
||||
[LICENSE](LICENSE) file for the exact license text.
|
||||
|
||||
## Security Policy
|
||||
|
||||
The security policies for this project are covered in
|
||||
[SECURITY.md](https://github.com/wbond/asn1crypto/blob/master/SECURITY.md).
|
||||
|
||||
## Documentation
|
||||
|
||||
The documentation for *asn1crypto* is composed of tutorials on basic usage and
|
||||
links to the source for the various pre-defined type classes.
|
||||
|
||||
### Tutorials
|
||||
|
||||
- [Universal Types with BER/DER Decoder and DER Encoder](docs/universal_types.md)
|
||||
- [PEM Encoder and Decoder](docs/pem.md)
|
||||
|
||||
### Reference
|
||||
|
||||
- [Universal types](asn1crypto/core.py), `asn1crypto.core`
|
||||
- [Digest, HMAC, signed digest and encryption algorithms](asn1crypto/algos.py), `asn1crypto.algos`
|
||||
- [Private and public keys](asn1crypto/keys.py), `asn1crypto.keys`
|
||||
- [X509 certificates](asn1crypto/x509.py), `asn1crypto.x509`
|
||||
- [Certificate revocation lists (CRLs)](asn1crypto/crl.py), `asn1crypto.crl`
|
||||
- [Online certificate status protocol (OCSP)](asn1crypto/ocsp.py), `asn1crypto.ocsp`
|
||||
- [Certificate signing requests (CSRs)](asn1crypto/csr.py), `asn1crypto.csr`
|
||||
- [Private key/certificate containers (PKCS#12)](asn1crypto/pkcs12.py), `asn1crypto.pkcs12`
|
||||
- [Cryptographic message syntax (CMS, PKCS#7)](asn1crypto/cms.py), `asn1crypto.cms`
|
||||
- [Time stamp protocol (TSP)](asn1crypto/tsp.py), `asn1crypto.tsp`
|
||||
- [PDF signatures](asn1crypto/pdf.py), `asn1crypto.pdf`
|
||||
|
||||
## Continuous Integration
|
||||
|
||||
Various combinations of platforms and versions of Python are tested via:
|
||||
|
||||
- [macOS, Linux, Windows](https://github.com/wbond/asn1crypto/actions/workflows/ci.yml) via GitHub Actions
|
||||
- [arm64](https://circleci.com/gh/wbond/asn1crypto) via CircleCI
|
||||
|
||||
## Testing
|
||||
|
||||
Tests are written using `unittest` and require no third-party packages.
|
||||
|
||||
Depending on what type of source is available for the package, the following
|
||||
commands can be used to run the test suite.
|
||||
|
||||
### Git Repository
|
||||
|
||||
When working within a Git working copy, or an archive of the Git repository,
|
||||
the full test suite is run via:
|
||||
|
||||
```bash
|
||||
python run.py tests
|
||||
```
|
||||
|
||||
To run only some tests, pass a regular expression as a parameter to `tests`.
|
||||
|
||||
```bash
|
||||
python run.py tests ocsp
|
||||
```
|
||||
|
||||
### PyPi Source Distribution
|
||||
|
||||
When working within an extracted source distribution (aka `.tar.gz`) from
|
||||
PyPi, the full test suite is run via:
|
||||
|
||||
```bash
|
||||
python setup.py test
|
||||
```
|
||||
|
||||
### Package
|
||||
|
||||
When the package has been installed via pip (or another method), the package
|
||||
`asn1crypto_tests` may be installed and invoked to run the full test suite:
|
||||
|
||||
```bash
|
||||
pip install asn1crypto_tests
|
||||
python -m asn1crypto_tests
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
To install the package used for linting, execute:
|
||||
|
||||
```bash
|
||||
pip install --user -r requires/lint
|
||||
```
|
||||
|
||||
The following command will run the linter:
|
||||
|
||||
```bash
|
||||
python run.py lint
|
||||
```
|
||||
|
||||
Support for code coverage can be installed via:
|
||||
|
||||
```bash
|
||||
pip install --user -r requires/coverage
|
||||
```
|
||||
|
||||
Coverage is measured by running:
|
||||
|
||||
```bash
|
||||
python run.py coverage
|
||||
```
|
||||
|
||||
To change the version number of the package, run:
|
||||
|
||||
```bash
|
||||
python run.py version {pep440_version}
|
||||
```
|
||||
|
||||
To install the necessary packages for releasing a new version on PyPI, run:
|
||||
|
||||
```bash
|
||||
pip install --user -r requires/release
|
||||
```
|
||||
|
||||
Releases are created by:
|
||||
|
||||
- Making a git tag in [PEP 440](https://www.python.org/dev/peps/pep-0440/#examples-of-compliant-version-schemes) format
|
||||
- Running the command:
|
||||
|
||||
```bash
|
||||
python run.py release
|
||||
```
|
||||
|
||||
Existing releases can be found at https://pypi.org/project/asn1crypto/.
|
||||
|
||||
## CI Tasks
|
||||
|
||||
A task named `deps` exists to download and stage all necessary testing
|
||||
dependencies. On posix platforms, `curl` is used for downloads and on Windows
|
||||
PowerShell with `Net.WebClient` is used. This configuration sidesteps issues
|
||||
related to getting pip to work properly and messing with `site-packages` for
|
||||
the version of Python being used.
|
||||
|
||||
The `ci` task runs `lint` (if flake8 is available for the version of Python) and
|
||||
`coverage` (or `tests` if coverage is not available for the version of Python).
|
||||
If the current directory is a clean git working copy, the coverage data is
|
||||
submitted to codecov.io.
|
||||
|
||||
```bash
|
||||
python run.py deps
|
||||
python run.py ci
|
||||
```
|
||||
|
||||
Keywords: asn1 crypto pki x509 certificate rsa dsa ec dh
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 2
|
||||
Classifier: Programming Language :: Python :: 2.6
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.2
|
||||
Classifier: Programming Language :: Python :: 3.3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Topic :: Security :: Cryptography
|
||||
Description-Content-Type: text/markdown
|
@ -1,37 +0,0 @@
|
||||
# Security Policy
|
||||
|
||||
## How to Report
|
||||
|
||||
If you believe you've found an issue that has security implications, please do
|
||||
not post a public issue on GitHub. Instead, email the project lead, Will Bond,
|
||||
at will@wbond.net.
|
||||
|
||||
You should receive a response within two business days, and follow up emails
|
||||
during the process of confirming the potential issue.
|
||||
|
||||
## Supported Versions
|
||||
|
||||
The asn1crypto project only provides security patches for the most recent
|
||||
release. This is primarily a function of available resources.
|
||||
|
||||
## Disclosure Process
|
||||
|
||||
The following process is used when handling a potential secuirty issue:
|
||||
|
||||
1. The report should be emailed to will@wbond.net, and NOT posted on the
|
||||
GitHub issue tracker.
|
||||
2. Confirmation of receipt of the report should happen within two business
|
||||
days.
|
||||
3. Information will be collected and an investigation will be performed to
|
||||
determine if a security issue exists.
|
||||
4. If no security issue is found, the process will end.
|
||||
5. A fix for the issue and announcement will be drafted.
|
||||
6. A release schedule and accouncement will be negotiated between the
|
||||
reporter and the project
|
||||
7. The security contacts for Arch Linux, Conda, Debian, Fedora, FreeBSD,
|
||||
Ubuntu, and Tidelift will be contacted to notify them of an upcoming
|
||||
security release.
|
||||
8. Fixes for all vulnerabilities will be performed, and new releases made,
|
||||
but without mention of a security issue. These changes and releases will
|
||||
be published before the announcement.
|
||||
9. An announcement will be made disclosing the vulnerability and the fix.
|
@ -1,47 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
from .version import __version__, __version_info__
|
||||
|
||||
__all__ = [
|
||||
'__version__',
|
||||
'__version_info__',
|
||||
'load_order',
|
||||
]
|
||||
|
||||
|
||||
def load_order():
|
||||
"""
|
||||
Returns a list of the module and sub-module names for asn1crypto in
|
||||
dependency load order, for the sake of live reloading code
|
||||
|
||||
:return:
|
||||
A list of unicode strings of module names, as they would appear in
|
||||
sys.modules, ordered by which module should be reloaded first
|
||||
"""
|
||||
|
||||
return [
|
||||
'asn1crypto._errors',
|
||||
'asn1crypto._int',
|
||||
'asn1crypto._ordereddict',
|
||||
'asn1crypto._teletex_codec',
|
||||
'asn1crypto._types',
|
||||
'asn1crypto._inet',
|
||||
'asn1crypto._iri',
|
||||
'asn1crypto.version',
|
||||
'asn1crypto.pem',
|
||||
'asn1crypto.util',
|
||||
'asn1crypto.parser',
|
||||
'asn1crypto.core',
|
||||
'asn1crypto.algos',
|
||||
'asn1crypto.keys',
|
||||
'asn1crypto.x509',
|
||||
'asn1crypto.crl',
|
||||
'asn1crypto.csr',
|
||||
'asn1crypto.ocsp',
|
||||
'asn1crypto.cms',
|
||||
'asn1crypto.pdf',
|
||||
'asn1crypto.pkcs12',
|
||||
'asn1crypto.tsp',
|
||||
'asn1crypto',
|
||||
]
|
@ -1,54 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Exports the following items:
|
||||
|
||||
- unwrap()
|
||||
- APIException()
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
|
||||
class APIException(Exception):
|
||||
"""
|
||||
An exception indicating an API has been removed from asn1crypto
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def unwrap(string, *params):
|
||||
"""
|
||||
Takes a multi-line string and does the following:
|
||||
|
||||
- dedents
|
||||
- converts newlines with text before and after into a single line
|
||||
- strips leading and trailing whitespace
|
||||
|
||||
:param string:
|
||||
The string to format
|
||||
|
||||
:param *params:
|
||||
Params to interpolate into the string
|
||||
|
||||
:return:
|
||||
The formatted string
|
||||
"""
|
||||
|
||||
output = textwrap.dedent(string)
|
||||
|
||||
# Unwrap lines, taking into account bulleted lists, ordered lists and
|
||||
# underlines consisting of = signs
|
||||
if output.find('\n') != -1:
|
||||
output = re.sub('(?<=\\S)\n(?=[^ \n\t\\d\\*\\-=])', ' ', output)
|
||||
|
||||
if params:
|
||||
output = output % params
|
||||
|
||||
output = output.strip()
|
||||
|
||||
return output
|
@ -1,170 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import socket
|
||||
import struct
|
||||
|
||||
from ._errors import unwrap
|
||||
from ._types import byte_cls, bytes_to_list, str_cls, type_name
|
||||
|
||||
|
||||
def inet_ntop(address_family, packed_ip):
|
||||
"""
|
||||
Windows compatibility shim for socket.inet_ntop().
|
||||
|
||||
:param address_family:
|
||||
socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
|
||||
|
||||
:param packed_ip:
|
||||
A byte string of the network form of an IP address
|
||||
|
||||
:return:
|
||||
A unicode string of the IP address
|
||||
"""
|
||||
|
||||
if address_family not in set([socket.AF_INET, socket.AF_INET6]):
|
||||
raise ValueError(unwrap(
|
||||
'''
|
||||
address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
|
||||
not %s
|
||||
''',
|
||||
repr(socket.AF_INET),
|
||||
repr(socket.AF_INET6),
|
||||
repr(address_family)
|
||||
))
|
||||
|
||||
if not isinstance(packed_ip, byte_cls):
|
||||
raise TypeError(unwrap(
|
||||
'''
|
||||
packed_ip must be a byte string, not %s
|
||||
''',
|
||||
type_name(packed_ip)
|
||||
))
|
||||
|
||||
required_len = 4 if address_family == socket.AF_INET else 16
|
||||
if len(packed_ip) != required_len:
|
||||
raise ValueError(unwrap(
|
||||
'''
|
||||
packed_ip must be %d bytes long - is %d
|
||||
''',
|
||||
required_len,
|
||||
len(packed_ip)
|
||||
))
|
||||
|
||||
if address_family == socket.AF_INET:
|
||||
return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))
|
||||
|
||||
octets = struct.unpack(b'!HHHHHHHH', packed_ip)
|
||||
|
||||
runs_of_zero = {}
|
||||
longest_run = 0
|
||||
zero_index = None
|
||||
for i, octet in enumerate(octets + (-1,)):
|
||||
if octet != 0:
|
||||
if zero_index is not None:
|
||||
length = i - zero_index
|
||||
if length not in runs_of_zero:
|
||||
runs_of_zero[length] = zero_index
|
||||
longest_run = max(longest_run, length)
|
||||
zero_index = None
|
||||
elif zero_index is None:
|
||||
zero_index = i
|
||||
|
||||
hexed = [hex(o)[2:] for o in octets]
|
||||
|
||||
if longest_run < 2:
|
||||
return ':'.join(hexed)
|
||||
|
||||
zero_start = runs_of_zero[longest_run]
|
||||
zero_end = zero_start + longest_run
|
||||
|
||||
return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:])
|
||||
|
||||
|
||||
def inet_pton(address_family, ip_string):
|
||||
"""
|
||||
Windows compatibility shim for socket.inet_ntop().
|
||||
|
||||
:param address_family:
|
||||
socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
|
||||
|
||||
:param ip_string:
|
||||
A unicode string of an IP address
|
||||
|
||||
:return:
|
||||
A byte string of the network form of the IP address
|
||||
"""
|
||||
|
||||
if address_family not in set([socket.AF_INET, socket.AF_INET6]):
|
||||
raise ValueError(unwrap(
|
||||
'''
|
||||
address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
|
||||
not %s
|
||||
''',
|
||||
repr(socket.AF_INET),
|
||||
repr(socket.AF_INET6),
|
||||
repr(address_family)
|
||||
))
|
||||
|
||||
if not isinstance(ip_string, str_cls):
|
||||
raise TypeError(unwrap(
|
||||
'''
|
||||
ip_string must be a unicode string, not %s
|
||||
''',
|
||||
type_name(ip_string)
|
||||
))
|
||||
|
||||
if address_family == socket.AF_INET:
|
||||
octets = ip_string.split('.')
|
||||
error = len(octets) != 4
|
||||
if not error:
|
||||
ints = []
|
||||
for o in octets:
|
||||
o = int(o)
|
||||
if o > 255 or o < 0:
|
||||
error = True
|
||||
break
|
||||
ints.append(o)
|
||||
|
||||
if error:
|
||||
raise ValueError(unwrap(
|
||||
'''
|
||||
ip_string must be a dotted string with four integers in the
|
||||
range of 0 to 255, got %s
|
||||
''',
|
||||
repr(ip_string)
|
||||
))
|
||||
|
||||
return struct.pack(b'!BBBB', *ints)
|
||||
|
||||
error = False
|
||||
omitted = ip_string.count('::')
|
||||
if omitted > 1:
|
||||
error = True
|
||||
elif omitted == 0:
|
||||
octets = ip_string.split(':')
|
||||
error = len(octets) != 8
|
||||
else:
|
||||
begin, end = ip_string.split('::')
|
||||
begin_octets = begin.split(':')
|
||||
end_octets = end.split(':')
|
||||
missing = 8 - len(begin_octets) - len(end_octets)
|
||||
octets = begin_octets + (['0'] * missing) + end_octets
|
||||
|
||||
if not error:
|
||||
ints = []
|
||||
for o in octets:
|
||||
o = int(o, 16)
|
||||
if o > 65535 or o < 0:
|
||||
error = True
|
||||
break
|
||||
ints.append(o)
|
||||
|
||||
return struct.pack(b'!HHHHHHHH', *ints)
|
||||
|
||||
raise ValueError(unwrap(
|
||||
'''
|
||||
ip_string must be a valid ipv6 string, got %s
|
||||
''',
|
||||
repr(ip_string)
|
||||
))
|
@ -1,22 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
|
||||
def fill_width(bytes_, width):
|
||||
"""
|
||||
Ensure a byte string representing a positive integer is a specific width
|
||||
(in bytes)
|
||||
|
||||
:param bytes_:
|
||||
The integer byte string
|
||||
|
||||
:param width:
|
||||
The desired width as an integer
|
||||
|
||||
:return:
|
||||
A byte string of the width specified
|
||||
"""
|
||||
|
||||
while len(bytes_) < width:
|
||||
bytes_ = b'\x00' + bytes_
|
||||
return bytes_
|
@ -1,291 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Functions to convert unicode IRIs into ASCII byte string URIs and back. Exports
|
||||
the following items:
|
||||
|
||||
- iri_to_uri()
|
||||
- uri_to_iri()
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
from encodings import idna # noqa
|
||||
import codecs
|
||||
import re
|
||||
import sys
|
||||
|
||||
from ._errors import unwrap
|
||||
from ._types import byte_cls, str_cls, type_name, bytes_to_list, int_types
|
||||
|
||||
if sys.version_info < (3,):
|
||||
from urlparse import urlsplit, urlunsplit
|
||||
from urllib import (
|
||||
quote as urlquote,
|
||||
unquote as unquote_to_bytes,
|
||||
)
|
||||
|
||||
else:
|
||||
from urllib.parse import (
|
||||
quote as urlquote,
|
||||
unquote_to_bytes,
|
||||
urlsplit,
|
||||
urlunsplit,
|
||||
)
|
||||
|
||||
|
||||
def iri_to_uri(value, normalize=False):
|
||||
"""
|
||||
Encodes a unicode IRI into an ASCII byte string URI
|
||||
|
||||
:param value:
|
||||
A unicode string of an IRI
|
||||
|
||||
:param normalize:
|
||||
A bool that controls URI normalization
|
||||
|
||||
:return:
|
||||
A byte string of the ASCII-encoded URI
|
||||
"""
|
||||
|
||||
if not isinstance(value, str_cls):
|
||||
raise TypeError(unwrap(
|
||||
'''
|
||||
value must be a unicode string, not %s
|
||||
''',
|
||||
type_name(value)
|
||||
))
|
||||
|
||||
scheme = None
|
||||
# Python 2.6 doesn't split properly is the URL doesn't start with http:// or https://
|
||||
if sys.version_info < (2, 7) and not value.startswith('http://') and not value.startswith('https://'):
|
||||
real_prefix = None
|
||||
prefix_match = re.match('^[^:]*://', value)
|
||||
if prefix_match:
|
||||
real_prefix = prefix_match.group(0)
|
||||
value = 'http://' + value[len(real_prefix):]
|
||||
parsed = urlsplit(value)
|
||||
if real_prefix:
|
||||
value = real_prefix + value[7:]
|
||||
scheme = _urlquote(real_prefix[:-3])
|
||||
else:
|
||||
parsed = urlsplit(value)
|
||||
|
||||
if scheme is None:
|
||||
scheme = _urlquote(parsed.scheme)
|
||||
hostname = parsed.hostname
|
||||
if hostname is not None:
|
||||
hostname = hostname.encode('idna')
|
||||
# RFC 3986 allows userinfo to contain sub-delims
|
||||
username = _urlquote(parsed.username, safe='!$&\'()*+,;=')
|
||||
password = _urlquote(parsed.password, safe='!$&\'()*+,;=')
|
||||
port = parsed.port
|
||||
if port is not None:
|
||||
port = str_cls(port).encode('ascii')
|
||||
|
||||
netloc = b''
|
||||
if username is not None:
|
||||
netloc += username
|
||||
if password:
|
||||
netloc += b':' + password
|
||||
netloc += b'@'
|
||||
if hostname is not None:
|
||||
netloc += hostname
|
||||
if port is not None:
|
||||
default_http = scheme == b'http' and port == b'80'
|
||||
default_https = scheme == b'https' and port == b'443'
|
||||
if not normalize or (not default_http and not default_https):
|
||||
netloc += b':' + port
|
||||
|
||||
# RFC 3986 allows a path to contain sub-delims, plus "@" and ":"
|
||||
path = _urlquote(parsed.path, safe='/!$&\'()*+,;=@:')
|
||||
# RFC 3986 allows the query to contain sub-delims, plus "@", ":" , "/" and "?"
|
||||
query = _urlquote(parsed.query, safe='/?!$&\'()*+,;=@:')
|
||||
# RFC 3986 allows the fragment to contain sub-delims, plus "@", ":" , "/" and "?"
|
||||
fragment = _urlquote(parsed.fragment, safe='/?!$&\'()*+,;=@:')
|
||||
|
||||
if normalize and query is None and fragment is None and path == b'/':
|
||||
path = None
|
||||
|
||||
# Python 2.7 compat
|
||||
if path is None:
|
||||
path = ''
|
||||
|
||||
output = urlunsplit((scheme, netloc, path, query, fragment))
|
||||
if isinstance(output, str_cls):
|
||||
output = output.encode('latin1')
|
||||
return output
|
||||
|
||||
|
||||
def uri_to_iri(value):
|
||||
"""
|
||||
Converts an ASCII URI byte string into a unicode IRI
|
||||
|
||||
:param value:
|
||||
An ASCII-encoded byte string of the URI
|
||||
|
||||
:return:
|
||||
A unicode string of the IRI
|
||||
"""
|
||||
|
||||
if not isinstance(value, byte_cls):
|
||||
raise TypeError(unwrap(
|
||||
'''
|
||||
value must be a byte string, not %s
|
||||
''',
|
||||
type_name(value)
|
||||
))
|
||||
|
||||
parsed = urlsplit(value)
|
||||
|
||||
scheme = parsed.scheme
|
||||
if scheme is not None:
|
||||
scheme = scheme.decode('ascii')
|
||||
|
||||
username = _urlunquote(parsed.username, remap=[':', '@'])
|
||||
password = _urlunquote(parsed.password, remap=[':', '@'])
|
||||
hostname = parsed.hostname
|
||||
if hostname:
|
||||
hostname = hostname.decode('idna')
|
||||
port = parsed.port
|
||||
if port and not isinstance(port, int_types):
|
||||
port = port.decode('ascii')
|
||||
|
||||
netloc = ''
|
||||
if username is not None:
|
||||
netloc += username
|
||||
if password:
|
||||
netloc += ':' + password
|
||||
netloc += '@'
|
||||
if hostname is not None:
|
||||
netloc += hostname
|
||||
if port is not None:
|
||||
netloc += ':' + str_cls(port)
|
||||
|
||||
path = _urlunquote(parsed.path, remap=['/'], preserve=True)
|
||||
query = _urlunquote(parsed.query, remap=['&', '='], preserve=True)
|
||||
fragment = _urlunquote(parsed.fragment)
|
||||
|
||||
return urlunsplit((scheme, netloc, path, query, fragment))
|
||||
|
||||
|
||||
def _iri_utf8_errors_handler(exc):
|
||||
"""
|
||||
Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte
|
||||
sequences encoded in %XX format, but as part of a unicode string.
|
||||
|
||||
:param exc:
|
||||
The UnicodeDecodeError exception
|
||||
|
||||
:return:
|
||||
A 2-element tuple of (replacement unicode string, integer index to
|
||||
resume at)
|
||||
"""
|
||||
|
||||
bytes_as_ints = bytes_to_list(exc.object[exc.start:exc.end])
|
||||
replacements = ['%%%02x' % num for num in bytes_as_ints]
|
||||
return (''.join(replacements), exc.end)
|
||||
|
||||
|
||||
codecs.register_error('iriutf8', _iri_utf8_errors_handler)
|
||||
|
||||
|
||||
def _urlquote(string, safe=''):
|
||||
"""
|
||||
Quotes a unicode string for use in a URL
|
||||
|
||||
:param string:
|
||||
A unicode string
|
||||
|
||||
:param safe:
|
||||
A unicode string of character to not encode
|
||||
|
||||
:return:
|
||||
None (if string is None) or an ASCII byte string of the quoted string
|
||||
"""
|
||||
|
||||
if string is None or string == '':
|
||||
return None
|
||||
|
||||
# Anything already hex quoted is pulled out of the URL and unquoted if
|
||||
# possible
|
||||
escapes = []
|
||||
if re.search('%[0-9a-fA-F]{2}', string):
|
||||
# Try to unquote any percent values, restoring them if they are not
|
||||
# valid UTF-8. Also, requote any safe chars since encoded versions of
|
||||
# those are functionally different than the unquoted ones.
|
||||
def _try_unescape(match):
|
||||
byte_string = unquote_to_bytes(match.group(0))
|
||||
unicode_string = byte_string.decode('utf-8', 'iriutf8')
|
||||
for safe_char in list(safe):
|
||||
unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char))
|
||||
return unicode_string
|
||||
string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string)
|
||||
|
||||
# Once we have the minimal set of hex quoted values, removed them from
|
||||
# the string so that they are not double quoted
|
||||
def _extract_escape(match):
|
||||
escapes.append(match.group(0).encode('ascii'))
|
||||
return '\x00'
|
||||
string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string)
|
||||
|
||||
output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8'))
|
||||
if not isinstance(output, byte_cls):
|
||||
output = output.encode('ascii')
|
||||
|
||||
# Restore the existing quoted values that we extracted
|
||||
if len(escapes) > 0:
|
||||
def _return_escape(_):
|
||||
return escapes.pop(0)
|
||||
output = re.sub(b'%00', _return_escape, output)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def _urlunquote(byte_string, remap=None, preserve=None):
|
||||
"""
|
||||
Unquotes a URI portion from a byte string into unicode using UTF-8
|
||||
|
||||
:param byte_string:
|
||||
A byte string of the data to unquote
|
||||
|
||||
:param remap:
|
||||
A list of characters (as unicode) that should be re-mapped to a
|
||||
%XX encoding. This is used when characters are not valid in part of a
|
||||
URL.
|
||||
|
||||
:param preserve:
|
||||
A bool - indicates that the chars to be remapped if they occur in
|
||||
non-hex form, should be preserved. E.g. / for URL path.
|
||||
|
||||
:return:
|
||||
A unicode string
|
||||
"""
|
||||
|
||||
if byte_string is None:
|
||||
return byte_string
|
||||
|
||||
if byte_string == b'':
|
||||
return ''
|
||||
|
||||
if preserve:
|
||||
replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F']
|
||||
preserve_unmap = {}
|
||||
for char in remap:
|
||||
replacement = replacements.pop(0)
|
||||
preserve_unmap[replacement] = char
|
||||
byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii'))
|
||||
|
||||
byte_string = unquote_to_bytes(byte_string)
|
||||
|
||||
if remap:
|
||||
for char in remap:
|
||||
byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii'))
|
||||
|
||||
output = byte_string.decode('utf-8', 'iriutf8')
|
||||
|
||||
if preserve:
|
||||
for replacement, original in preserve_unmap.items():
|
||||
output = output.replace(replacement, original)
|
||||
|
||||
return output
|
@ -1,135 +0,0 @@
|
||||
# Copyright (c) 2009 Raymond Hettinger
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
# OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
|
||||
if not sys.version_info < (2, 7):
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
else:
|
||||
|
||||
from UserDict import DictMixin
|
||||
|
||||
class OrderedDict(dict, DictMixin):
|
||||
|
||||
def __init__(self, *args, **kwds):
|
||||
if len(args) > 1:
|
||||
raise TypeError('expected at most 1 arguments, got %d' % len(args))
|
||||
try:
|
||||
self.__end
|
||||
except AttributeError:
|
||||
self.clear()
|
||||
self.update(*args, **kwds)
|
||||
|
||||
def clear(self):
|
||||
self.__end = end = []
|
||||
end += [None, end, end] # sentinel node for doubly linked list
|
||||
self.__map = {} # key --> [key, prev, next]
|
||||
dict.clear(self)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if key not in self:
|
||||
end = self.__end
|
||||
curr = end[1]
|
||||
curr[2] = end[1] = self.__map[key] = [key, curr, end]
|
||||
dict.__setitem__(self, key, value)
|
||||
|
||||
def __delitem__(self, key):
|
||||
dict.__delitem__(self, key)
|
||||
key, prev, next_ = self.__map.pop(key)
|
||||
prev[2] = next_
|
||||
next_[1] = prev
|
||||
|
||||
def __iter__(self):
|
||||
end = self.__end
|
||||
curr = end[2]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[2]
|
||||
|
||||
def __reversed__(self):
|
||||
end = self.__end
|
||||
curr = end[1]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[1]
|
||||
|
||||
def popitem(self, last=True):
|
||||
if not self:
|
||||
raise KeyError('dictionary is empty')
|
||||
if last:
|
||||
key = reversed(self).next()
|
||||
else:
|
||||
key = iter(self).next()
|
||||
value = self.pop(key)
|
||||
return key, value
|
||||
|
||||
def __reduce__(self):
|
||||
items = [[k, self[k]] for k in self]
|
||||
tmp = self.__map, self.__end
|
||||
del self.__map, self.__end
|
||||
inst_dict = vars(self).copy()
|
||||
self.__map, self.__end = tmp
|
||||
if inst_dict:
|
||||
return (self.__class__, (items,), inst_dict)
|
||||
return self.__class__, (items,)
|
||||
|
||||
def keys(self):
|
||||
return list(self)
|
||||
|
||||
setdefault = DictMixin.setdefault
|
||||
update = DictMixin.update
|
||||
pop = DictMixin.pop
|
||||
values = DictMixin.values
|
||||
items = DictMixin.items
|
||||
iterkeys = DictMixin.iterkeys
|
||||
itervalues = DictMixin.itervalues
|
||||
iteritems = DictMixin.iteritems
|
||||
|
||||
def __repr__(self):
|
||||
if not self:
|
||||
return '%s()' % (self.__class__.__name__,)
|
||||
return '%s(%r)' % (self.__class__.__name__, self.items())
|
||||
|
||||
def copy(self):
|
||||
return self.__class__(self)
|
||||
|
||||
@classmethod
|
||||
def fromkeys(cls, iterable, value=None):
|
||||
d = cls()
|
||||
for key in iterable:
|
||||
d[key] = value
|
||||
return d
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, OrderedDict):
|
||||
if len(self) != len(other):
|
||||
return False
|
||||
for p, q in zip(self.items(), other.items()):
|
||||
if p != q:
|
||||
return False
|
||||
return True
|
||||
return dict.__eq__(self, other)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
@ -1,331 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Implementation of the teletex T.61 codec. Exports the following items:
|
||||
|
||||
- register()
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import codecs
|
||||
|
||||
|
||||
class TeletexCodec(codecs.Codec):
|
||||
|
||||
def encode(self, input_, errors='strict'):
|
||||
return codecs.charmap_encode(input_, errors, ENCODING_TABLE)
|
||||
|
||||
def decode(self, input_, errors='strict'):
|
||||
return codecs.charmap_decode(input_, errors, DECODING_TABLE)
|
||||
|
||||
|
||||
class TeletexIncrementalEncoder(codecs.IncrementalEncoder):
|
||||
|
||||
def encode(self, input_, final=False):
|
||||
return codecs.charmap_encode(input_, self.errors, ENCODING_TABLE)[0]
|
||||
|
||||
|
||||
class TeletexIncrementalDecoder(codecs.IncrementalDecoder):
|
||||
|
||||
def decode(self, input_, final=False):
|
||||
return codecs.charmap_decode(input_, self.errors, DECODING_TABLE)[0]
|
||||
|
||||
|
||||
class TeletexStreamWriter(TeletexCodec, codecs.StreamWriter):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class TeletexStreamReader(TeletexCodec, codecs.StreamReader):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def teletex_search_function(name):
|
||||
"""
|
||||
Search function for teletex codec that is passed to codecs.register()
|
||||
"""
|
||||
|
||||
if name != 'teletex':
|
||||
return None
|
||||
|
||||
return codecs.CodecInfo(
|
||||
name='teletex',
|
||||
encode=TeletexCodec().encode,
|
||||
decode=TeletexCodec().decode,
|
||||
incrementalencoder=TeletexIncrementalEncoder,
|
||||
incrementaldecoder=TeletexIncrementalDecoder,
|
||||
streamreader=TeletexStreamReader,
|
||||
streamwriter=TeletexStreamWriter,
|
||||
)
|
||||
|
||||
|
||||
def register():
|
||||
"""
|
||||
Registers the teletex codec
|
||||
"""
|
||||
|
||||
codecs.register(teletex_search_function)
|
||||
|
||||
|
||||
# http://en.wikipedia.org/wiki/ITU_T.61
|
||||
DECODING_TABLE = (
|
||||
'\u0000'
|
||||
'\u0001'
|
||||
'\u0002'
|
||||
'\u0003'
|
||||
'\u0004'
|
||||
'\u0005'
|
||||
'\u0006'
|
||||
'\u0007'
|
||||
'\u0008'
|
||||
'\u0009'
|
||||
'\u000A'
|
||||
'\u000B'
|
||||
'\u000C'
|
||||
'\u000D'
|
||||
'\u000E'
|
||||
'\u000F'
|
||||
'\u0010'
|
||||
'\u0011'
|
||||
'\u0012'
|
||||
'\u0013'
|
||||
'\u0014'
|
||||
'\u0015'
|
||||
'\u0016'
|
||||
'\u0017'
|
||||
'\u0018'
|
||||
'\u0019'
|
||||
'\u001A'
|
||||
'\u001B'
|
||||
'\u001C'
|
||||
'\u001D'
|
||||
'\u001E'
|
||||
'\u001F'
|
||||
'\u0020'
|
||||
'\u0021'
|
||||
'\u0022'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\u0025'
|
||||
'\u0026'
|
||||
'\u0027'
|
||||
'\u0028'
|
||||
'\u0029'
|
||||
'\u002A'
|
||||
'\u002B'
|
||||
'\u002C'
|
||||
'\u002D'
|
||||
'\u002E'
|
||||
'\u002F'
|
||||
'\u0030'
|
||||
'\u0031'
|
||||
'\u0032'
|
||||
'\u0033'
|
||||
'\u0034'
|
||||
'\u0035'
|
||||
'\u0036'
|
||||
'\u0037'
|
||||
'\u0038'
|
||||
'\u0039'
|
||||
'\u003A'
|
||||
'\u003B'
|
||||
'\u003C'
|
||||
'\u003D'
|
||||
'\u003E'
|
||||
'\u003F'
|
||||
'\u0040'
|
||||
'\u0041'
|
||||
'\u0042'
|
||||
'\u0043'
|
||||
'\u0044'
|
||||
'\u0045'
|
||||
'\u0046'
|
||||
'\u0047'
|
||||
'\u0048'
|
||||
'\u0049'
|
||||
'\u004A'
|
||||
'\u004B'
|
||||
'\u004C'
|
||||
'\u004D'
|
||||
'\u004E'
|
||||
'\u004F'
|
||||
'\u0050'
|
||||
'\u0051'
|
||||
'\u0052'
|
||||
'\u0053'
|
||||
'\u0054'
|
||||
'\u0055'
|
||||
'\u0056'
|
||||
'\u0057'
|
||||
'\u0058'
|
||||
'\u0059'
|
||||
'\u005A'
|
||||
'\u005B'
|
||||
'\ufffe'
|
||||
'\u005D'
|
||||
'\ufffe'
|
||||
'\u005F'
|
||||
'\ufffe'
|
||||
'\u0061'
|
||||
'\u0062'
|
||||
'\u0063'
|
||||
'\u0064'
|
||||
'\u0065'
|
||||
'\u0066'
|
||||
'\u0067'
|
||||
'\u0068'
|
||||
'\u0069'
|
||||
'\u006A'
|
||||
'\u006B'
|
||||
'\u006C'
|
||||
'\u006D'
|
||||
'\u006E'
|
||||
'\u006F'
|
||||
'\u0070'
|
||||
'\u0071'
|
||||
'\u0072'
|
||||
'\u0073'
|
||||
'\u0074'
|
||||
'\u0075'
|
||||
'\u0076'
|
||||
'\u0077'
|
||||
'\u0078'
|
||||
'\u0079'
|
||||
'\u007A'
|
||||
'\ufffe'
|
||||
'\u007C'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\u007F'
|
||||
'\u0080'
|
||||
'\u0081'
|
||||
'\u0082'
|
||||
'\u0083'
|
||||
'\u0084'
|
||||
'\u0085'
|
||||
'\u0086'
|
||||
'\u0087'
|
||||
'\u0088'
|
||||
'\u0089'
|
||||
'\u008A'
|
||||
'\u008B'
|
||||
'\u008C'
|
||||
'\u008D'
|
||||
'\u008E'
|
||||
'\u008F'
|
||||
'\u0090'
|
||||
'\u0091'
|
||||
'\u0092'
|
||||
'\u0093'
|
||||
'\u0094'
|
||||
'\u0095'
|
||||
'\u0096'
|
||||
'\u0097'
|
||||
'\u0098'
|
||||
'\u0099'
|
||||
'\u009A'
|
||||
'\u009B'
|
||||
'\u009C'
|
||||
'\u009D'
|
||||
'\u009E'
|
||||
'\u009F'
|
||||
'\u00A0'
|
||||
'\u00A1'
|
||||
'\u00A2'
|
||||
'\u00A3'
|
||||
'\u0024'
|
||||
'\u00A5'
|
||||
'\u0023'
|
||||
'\u00A7'
|
||||
'\u00A4'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\u00AB'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\u00B0'
|
||||
'\u00B1'
|
||||
'\u00B2'
|
||||
'\u00B3'
|
||||
'\u00D7'
|
||||
'\u00B5'
|
||||
'\u00B6'
|
||||
'\u00B7'
|
||||
'\u00F7'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\u00BB'
|
||||
'\u00BC'
|
||||
'\u00BD'
|
||||
'\u00BE'
|
||||
'\u00BF'
|
||||
'\ufffe'
|
||||
'\u0300'
|
||||
'\u0301'
|
||||
'\u0302'
|
||||
'\u0303'
|
||||
'\u0304'
|
||||
'\u0306'
|
||||
'\u0307'
|
||||
'\u0308'
|
||||
'\ufffe'
|
||||
'\u030A'
|
||||
'\u0327'
|
||||
'\u0332'
|
||||
'\u030B'
|
||||
'\u0328'
|
||||
'\u030C'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\ufffe'
|
||||
'\u2126'
|
||||
'\u00C6'
|
||||
'\u00D0'
|
||||
'\u00AA'
|
||||
'\u0126'
|
||||
'\ufffe'
|
||||
'\u0132'
|
||||
'\u013F'
|
||||
'\u0141'
|
||||
'\u00D8'
|
||||
'\u0152'
|
||||
'\u00BA'
|
||||
'\u00DE'
|
||||
'\u0166'
|
||||
'\u014A'
|
||||
'\u0149'
|
||||
'\u0138'
|
||||
'\u00E6'
|
||||
'\u0111'
|
||||
'\u00F0'
|
||||
'\u0127'
|
||||
'\u0131'
|
||||
'\u0133'
|
||||
'\u0140'
|
||||
'\u0142'
|
||||
'\u00F8'
|
||||
'\u0153'
|
||||
'\u00DF'
|
||||
'\u00FE'
|
||||
'\u0167'
|
||||
'\u014B'
|
||||
'\ufffe'
|
||||
)
|
||||
ENCODING_TABLE = codecs.charmap_build(DECODING_TABLE)
|
@ -1,46 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import inspect
|
||||
import sys
|
||||
|
||||
|
||||
if sys.version_info < (3,):
|
||||
str_cls = unicode # noqa
|
||||
byte_cls = str
|
||||
int_types = (int, long) # noqa
|
||||
|
||||
def bytes_to_list(byte_string):
|
||||
return [ord(b) for b in byte_string]
|
||||
|
||||
chr_cls = chr
|
||||
|
||||
else:
|
||||
str_cls = str
|
||||
byte_cls = bytes
|
||||
int_types = int
|
||||
|
||||
bytes_to_list = list
|
||||
|
||||
def chr_cls(num):
|
||||
return bytes([num])
|
||||
|
||||
|
||||
def type_name(value):
|
||||
"""
|
||||
Returns a user-readable name for the type of an object
|
||||
|
||||
:param value:
|
||||
A value to get the type name of
|
||||
|
||||
:return:
|
||||
A unicode string of the object's type name
|
||||
"""
|
||||
|
||||
if inspect.isclass(value):
|
||||
cls = value
|
||||
else:
|
||||
cls = value.__class__
|
||||
if cls.__module__ in set(['builtins', '__builtin__']):
|
||||
return cls.__name__
|
||||
return '%s.%s' % (cls.__module__, cls.__name__)
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -1,536 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
ASN.1 type classes for certificate revocation lists (CRL). Exports the
|
||||
following items:
|
||||
|
||||
- CertificateList()
|
||||
|
||||
Other type classes are defined that help compose the types listed above.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import hashlib
|
||||
|
||||
from .algos import SignedDigestAlgorithm
|
||||
from .core import (
|
||||
Boolean,
|
||||
Enumerated,
|
||||
GeneralizedTime,
|
||||
Integer,
|
||||
ObjectIdentifier,
|
||||
OctetBitString,
|
||||
ParsableOctetString,
|
||||
Sequence,
|
||||
SequenceOf,
|
||||
)
|
||||
from .x509 import (
|
||||
AuthorityInfoAccessSyntax,
|
||||
AuthorityKeyIdentifier,
|
||||
CRLDistributionPoints,
|
||||
DistributionPointName,
|
||||
GeneralNames,
|
||||
Name,
|
||||
ReasonFlags,
|
||||
Time,
|
||||
)
|
||||
|
||||
|
||||
# The structures in this file are taken from https://tools.ietf.org/html/rfc5280
|
||||
|
||||
|
||||
class Version(Integer):
|
||||
_map = {
|
||||
0: 'v1',
|
||||
1: 'v2',
|
||||
2: 'v3',
|
||||
}
|
||||
|
||||
|
||||
class IssuingDistributionPoint(Sequence):
|
||||
_fields = [
|
||||
('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
|
||||
('only_contains_user_certs', Boolean, {'implicit': 1, 'default': False}),
|
||||
('only_contains_ca_certs', Boolean, {'implicit': 2, 'default': False}),
|
||||
('only_some_reasons', ReasonFlags, {'implicit': 3, 'optional': True}),
|
||||
('indirect_crl', Boolean, {'implicit': 4, 'default': False}),
|
||||
('only_contains_attribute_certs', Boolean, {'implicit': 5, 'default': False}),
|
||||
]
|
||||
|
||||
|
||||
class TBSCertListExtensionId(ObjectIdentifier):
|
||||
_map = {
|
||||
'2.5.29.18': 'issuer_alt_name',
|
||||
'2.5.29.20': 'crl_number',
|
||||
'2.5.29.27': 'delta_crl_indicator',
|
||||
'2.5.29.28': 'issuing_distribution_point',
|
||||
'2.5.29.35': 'authority_key_identifier',
|
||||
'2.5.29.46': 'freshest_crl',
|
||||
'1.3.6.1.5.5.7.1.1': 'authority_information_access',
|
||||
}
|
||||
|
||||
|
||||
class TBSCertListExtension(Sequence):
|
||||
_fields = [
|
||||
('extn_id', TBSCertListExtensionId),
|
||||
('critical', Boolean, {'default': False}),
|
||||
('extn_value', ParsableOctetString),
|
||||
]
|
||||
|
||||
_oid_pair = ('extn_id', 'extn_value')
|
||||
_oid_specs = {
|
||||
'issuer_alt_name': GeneralNames,
|
||||
'crl_number': Integer,
|
||||
'delta_crl_indicator': Integer,
|
||||
'issuing_distribution_point': IssuingDistributionPoint,
|
||||
'authority_key_identifier': AuthorityKeyIdentifier,
|
||||
'freshest_crl': CRLDistributionPoints,
|
||||
'authority_information_access': AuthorityInfoAccessSyntax,
|
||||
}
|
||||
|
||||
|
||||
class TBSCertListExtensions(SequenceOf):
|
||||
_child_spec = TBSCertListExtension
|
||||
|
||||
|
||||
class CRLReason(Enumerated):
|
||||
_map = {
|
||||
0: 'unspecified',
|
||||
1: 'key_compromise',
|
||||
2: 'ca_compromise',
|
||||
3: 'affiliation_changed',
|
||||
4: 'superseded',
|
||||
5: 'cessation_of_operation',
|
||||
6: 'certificate_hold',
|
||||
8: 'remove_from_crl',
|
||||
9: 'privilege_withdrawn',
|
||||
10: 'aa_compromise',
|
||||
}
|
||||
|
||||
@property
|
||||
def human_friendly(self):
|
||||
"""
|
||||
:return:
|
||||
A unicode string with revocation description that is suitable to
|
||||
show to end-users. Starts with a lower case letter and phrased in
|
||||
such a way that it makes sense after the phrase "because of" or
|
||||
"due to".
|
||||
"""
|
||||
|
||||
return {
|
||||
'unspecified': 'an unspecified reason',
|
||||
'key_compromise': 'a compromised key',
|
||||
'ca_compromise': 'the CA being compromised',
|
||||
'affiliation_changed': 'an affiliation change',
|
||||
'superseded': 'certificate supersession',
|
||||
'cessation_of_operation': 'a cessation of operation',
|
||||
'certificate_hold': 'a certificate hold',
|
||||
'remove_from_crl': 'removal from the CRL',
|
||||
'privilege_withdrawn': 'privilege withdrawl',
|
||||
'aa_compromise': 'the AA being compromised',
|
||||
}[self.native]
|
||||
|
||||
|
||||
class CRLEntryExtensionId(ObjectIdentifier):
|
||||
_map = {
|
||||
'2.5.29.21': 'crl_reason',
|
||||
'2.5.29.23': 'hold_instruction_code',
|
||||
'2.5.29.24': 'invalidity_date',
|
||||
'2.5.29.29': 'certificate_issuer',
|
||||
}
|
||||
|
||||
|
||||
class CRLEntryExtension(Sequence):
|
||||
_fields = [
|
||||
('extn_id', CRLEntryExtensionId),
|
||||
('critical', Boolean, {'default': False}),
|
||||
('extn_value', ParsableOctetString),
|
||||
]
|
||||
|
||||
_oid_pair = ('extn_id', 'extn_value')
|
||||
_oid_specs = {
|
||||
'crl_reason': CRLReason,
|
||||
'hold_instruction_code': ObjectIdentifier,
|
||||
'invalidity_date': GeneralizedTime,
|
||||
'certificate_issuer': GeneralNames,
|
||||
}
|
||||
|
||||
|
||||
class CRLEntryExtensions(SequenceOf):
|
||||
_child_spec = CRLEntryExtension
|
||||
|
||||
|
||||
class RevokedCertificate(Sequence):
|
||||
_fields = [
|
||||
('user_certificate', Integer),
|
||||
('revocation_date', Time),
|
||||
('crl_entry_extensions', CRLEntryExtensions, {'optional': True}),
|
||||
]
|
||||
|
||||
_processed_extensions = False
|
||||
_critical_extensions = None
|
||||
_crl_reason_value = None
|
||||
_invalidity_date_value = None
|
||||
_certificate_issuer_value = None
|
||||
_issuer_name = False
|
||||
|
||||
def _set_extensions(self):
|
||||
"""
|
||||
Sets common named extensions to private attributes and creates a list
|
||||
of critical extensions
|
||||
"""
|
||||
|
||||
self._critical_extensions = set()
|
||||
|
||||
for extension in self['crl_entry_extensions']:
|
||||
name = extension['extn_id'].native
|
||||
attribute_name = '_%s_value' % name
|
||||
if hasattr(self, attribute_name):
|
||||
setattr(self, attribute_name, extension['extn_value'].parsed)
|
||||
if extension['critical'].native:
|
||||
self._critical_extensions.add(name)
|
||||
|
||||
self._processed_extensions = True
|
||||
|
||||
@property
|
||||
def critical_extensions(self):
|
||||
"""
|
||||
Returns a set of the names (or OID if not a known extension) of the
|
||||
extensions marked as critical
|
||||
|
||||
:return:
|
||||
A set of unicode strings
|
||||
"""
|
||||
|
||||
if not self._processed_extensions:
|
||||
self._set_extensions()
|
||||
return self._critical_extensions
|
||||
|
||||
@property
|
||||
def crl_reason_value(self):
|
||||
"""
|
||||
This extension indicates the reason that a certificate was revoked.
|
||||
|
||||
:return:
|
||||
None or a CRLReason object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._crl_reason_value
|
||||
|
||||
@property
|
||||
def invalidity_date_value(self):
|
||||
"""
|
||||
This extension indicates the suspected date/time the private key was
|
||||
compromised or the certificate became invalid. This would usually be
|
||||
before the revocation date, which is when the CA processed the
|
||||
revocation.
|
||||
|
||||
:return:
|
||||
None or a GeneralizedTime object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._invalidity_date_value
|
||||
|
||||
@property
|
||||
def certificate_issuer_value(self):
|
||||
"""
|
||||
This extension indicates the issuer of the certificate in question,
|
||||
and is used in indirect CRLs. CRL entries without this extension are
|
||||
for certificates issued from the last seen issuer.
|
||||
|
||||
:return:
|
||||
None or an x509.GeneralNames object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._certificate_issuer_value
|
||||
|
||||
@property
|
||||
def issuer_name(self):
|
||||
"""
|
||||
:return:
|
||||
None, or an asn1crypto.x509.Name object for the issuer of the cert
|
||||
"""
|
||||
|
||||
if self._issuer_name is False:
|
||||
self._issuer_name = None
|
||||
if self.certificate_issuer_value:
|
||||
for general_name in self.certificate_issuer_value:
|
||||
if general_name.name == 'directory_name':
|
||||
self._issuer_name = general_name.chosen
|
||||
break
|
||||
return self._issuer_name
|
||||
|
||||
|
||||
class RevokedCertificates(SequenceOf):
|
||||
_child_spec = RevokedCertificate
|
||||
|
||||
|
||||
class TbsCertList(Sequence):
|
||||
_fields = [
|
||||
('version', Version, {'optional': True}),
|
||||
('signature', SignedDigestAlgorithm),
|
||||
('issuer', Name),
|
||||
('this_update', Time),
|
||||
('next_update', Time, {'optional': True}),
|
||||
('revoked_certificates', RevokedCertificates, {'optional': True}),
|
||||
('crl_extensions', TBSCertListExtensions, {'explicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class CertificateList(Sequence):
|
||||
_fields = [
|
||||
('tbs_cert_list', TbsCertList),
|
||||
('signature_algorithm', SignedDigestAlgorithm),
|
||||
('signature', OctetBitString),
|
||||
]
|
||||
|
||||
_processed_extensions = False
|
||||
_critical_extensions = None
|
||||
_issuer_alt_name_value = None
|
||||
_crl_number_value = None
|
||||
_delta_crl_indicator_value = None
|
||||
_issuing_distribution_point_value = None
|
||||
_authority_key_identifier_value = None
|
||||
_freshest_crl_value = None
|
||||
_authority_information_access_value = None
|
||||
_issuer_cert_urls = None
|
||||
_delta_crl_distribution_points = None
|
||||
_sha1 = None
|
||||
_sha256 = None
|
||||
|
||||
def _set_extensions(self):
|
||||
"""
|
||||
Sets common named extensions to private attributes and creates a list
|
||||
of critical extensions
|
||||
"""
|
||||
|
||||
self._critical_extensions = set()
|
||||
|
||||
for extension in self['tbs_cert_list']['crl_extensions']:
|
||||
name = extension['extn_id'].native
|
||||
attribute_name = '_%s_value' % name
|
||||
if hasattr(self, attribute_name):
|
||||
setattr(self, attribute_name, extension['extn_value'].parsed)
|
||||
if extension['critical'].native:
|
||||
self._critical_extensions.add(name)
|
||||
|
||||
self._processed_extensions = True
|
||||
|
||||
@property
|
||||
def critical_extensions(self):
|
||||
"""
|
||||
Returns a set of the names (or OID if not a known extension) of the
|
||||
extensions marked as critical
|
||||
|
||||
:return:
|
||||
A set of unicode strings
|
||||
"""
|
||||
|
||||
if not self._processed_extensions:
|
||||
self._set_extensions()
|
||||
return self._critical_extensions
|
||||
|
||||
@property
|
||||
def issuer_alt_name_value(self):
|
||||
"""
|
||||
This extension allows associating one or more alternative names with
|
||||
the issuer of the CRL.
|
||||
|
||||
:return:
|
||||
None or an x509.GeneralNames object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._issuer_alt_name_value
|
||||
|
||||
@property
|
||||
def crl_number_value(self):
|
||||
"""
|
||||
This extension adds a monotonically increasing number to the CRL and is
|
||||
used to distinguish different versions of the CRL.
|
||||
|
||||
:return:
|
||||
None or an Integer object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._crl_number_value
|
||||
|
||||
@property
|
||||
def delta_crl_indicator_value(self):
|
||||
"""
|
||||
This extension indicates a CRL is a delta CRL, and contains the CRL
|
||||
number of the base CRL that it is a delta from.
|
||||
|
||||
:return:
|
||||
None or an Integer object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._delta_crl_indicator_value
|
||||
|
||||
@property
|
||||
def issuing_distribution_point_value(self):
|
||||
"""
|
||||
This extension includes information about what types of revocations
|
||||
and certificates are part of the CRL.
|
||||
|
||||
:return:
|
||||
None or an IssuingDistributionPoint object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._issuing_distribution_point_value
|
||||
|
||||
@property
|
||||
def authority_key_identifier_value(self):
|
||||
"""
|
||||
This extension helps in identifying the public key with which to
|
||||
validate the authenticity of the CRL.
|
||||
|
||||
:return:
|
||||
None or an AuthorityKeyIdentifier object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._authority_key_identifier_value
|
||||
|
||||
@property
|
||||
def freshest_crl_value(self):
|
||||
"""
|
||||
This extension is used in complete CRLs to indicate where a delta CRL
|
||||
may be located.
|
||||
|
||||
:return:
|
||||
None or a CRLDistributionPoints object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._freshest_crl_value
|
||||
|
||||
@property
|
||||
def authority_information_access_value(self):
|
||||
"""
|
||||
This extension is used to provide a URL with which to download the
|
||||
certificate used to sign this CRL.
|
||||
|
||||
:return:
|
||||
None or an AuthorityInfoAccessSyntax object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._authority_information_access_value
|
||||
|
||||
@property
|
||||
def issuer(self):
|
||||
"""
|
||||
:return:
|
||||
An asn1crypto.x509.Name object for the issuer of the CRL
|
||||
"""
|
||||
|
||||
return self['tbs_cert_list']['issuer']
|
||||
|
||||
@property
|
||||
def authority_key_identifier(self):
|
||||
"""
|
||||
:return:
|
||||
None or a byte string of the key_identifier from the authority key
|
||||
identifier extension
|
||||
"""
|
||||
|
||||
if not self.authority_key_identifier_value:
|
||||
return None
|
||||
|
||||
return self.authority_key_identifier_value['key_identifier'].native
|
||||
|
||||
@property
|
||||
def issuer_cert_urls(self):
|
||||
"""
|
||||
:return:
|
||||
A list of unicode strings that are URLs that should contain either
|
||||
an individual DER-encoded X.509 certificate, or a DER-encoded CMS
|
||||
message containing multiple certificates
|
||||
"""
|
||||
|
||||
if self._issuer_cert_urls is None:
|
||||
self._issuer_cert_urls = []
|
||||
if self.authority_information_access_value:
|
||||
for entry in self.authority_information_access_value:
|
||||
if entry['access_method'].native == 'ca_issuers':
|
||||
location = entry['access_location']
|
||||
if location.name != 'uniform_resource_identifier':
|
||||
continue
|
||||
url = location.native
|
||||
if url.lower()[0:7] == 'http://':
|
||||
self._issuer_cert_urls.append(url)
|
||||
return self._issuer_cert_urls
|
||||
|
||||
@property
|
||||
def delta_crl_distribution_points(self):
|
||||
"""
|
||||
Returns delta CRL URLs - only applies to complete CRLs
|
||||
|
||||
:return:
|
||||
A list of zero or more DistributionPoint objects
|
||||
"""
|
||||
|
||||
if self._delta_crl_distribution_points is None:
|
||||
self._delta_crl_distribution_points = []
|
||||
|
||||
if self.freshest_crl_value is not None:
|
||||
for distribution_point in self.freshest_crl_value:
|
||||
distribution_point_name = distribution_point['distribution_point']
|
||||
# RFC 5280 indicates conforming CA should not use the relative form
|
||||
if distribution_point_name.name == 'name_relative_to_crl_issuer':
|
||||
continue
|
||||
# This library is currently only concerned with HTTP-based CRLs
|
||||
for general_name in distribution_point_name.chosen:
|
||||
if general_name.name == 'uniform_resource_identifier':
|
||||
self._delta_crl_distribution_points.append(distribution_point)
|
||||
|
||||
return self._delta_crl_distribution_points
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
"""
|
||||
:return:
|
||||
A byte string of the signature
|
||||
"""
|
||||
|
||||
return self['signature'].native
|
||||
|
||||
@property
|
||||
def sha1(self):
|
||||
"""
|
||||
:return:
|
||||
The SHA1 hash of the DER-encoded bytes of this certificate list
|
||||
"""
|
||||
|
||||
if self._sha1 is None:
|
||||
self._sha1 = hashlib.sha1(self.dump()).digest()
|
||||
return self._sha1
|
||||
|
||||
@property
|
||||
def sha256(self):
|
||||
"""
|
||||
:return:
|
||||
The SHA-256 hash of the DER-encoded bytes of this certificate list
|
||||
"""
|
||||
|
||||
if self._sha256 is None:
|
||||
self._sha256 = hashlib.sha256(self.dump()).digest()
|
||||
return self._sha256
|
@ -1,133 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
ASN.1 type classes for certificate signing requests (CSR). Exports the
|
||||
following items:
|
||||
|
||||
- CertificationRequest()
|
||||
|
||||
Other type classes are defined that help compose the types listed above.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
from .algos import SignedDigestAlgorithm
|
||||
from .core import (
|
||||
Any,
|
||||
BitString,
|
||||
BMPString,
|
||||
Integer,
|
||||
ObjectIdentifier,
|
||||
OctetBitString,
|
||||
Sequence,
|
||||
SetOf,
|
||||
UTF8String
|
||||
)
|
||||
from .keys import PublicKeyInfo
|
||||
from .x509 import DirectoryString, Extensions, Name
|
||||
|
||||
|
||||
# The structures in this file are taken from https://tools.ietf.org/html/rfc2986
|
||||
# and https://tools.ietf.org/html/rfc2985
|
||||
|
||||
|
||||
class Version(Integer):
|
||||
_map = {
|
||||
0: 'v1',
|
||||
}
|
||||
|
||||
|
||||
class CSRAttributeType(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.2.840.113549.1.9.7': 'challenge_password',
|
||||
'1.2.840.113549.1.9.9': 'extended_certificate_attributes',
|
||||
'1.2.840.113549.1.9.14': 'extension_request',
|
||||
# https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/a5eaae36-e9f3-4dc5-a687-bfa7115954f1
|
||||
'1.3.6.1.4.1.311.13.2.2': 'microsoft_enrollment_csp_provider',
|
||||
# https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/7c677cba-030d-48be-ba2b-01e407705f34
|
||||
'1.3.6.1.4.1.311.13.2.3': 'microsoft_os_version',
|
||||
# https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/64e5ff6d-c6dd-4578-92f7-b3d895f9b9c7
|
||||
'1.3.6.1.4.1.311.21.20': 'microsoft_request_client_info',
|
||||
}
|
||||
|
||||
|
||||
class SetOfDirectoryString(SetOf):
|
||||
_child_spec = DirectoryString
|
||||
|
||||
|
||||
class Attribute(Sequence):
|
||||
_fields = [
|
||||
('type', ObjectIdentifier),
|
||||
('values', SetOf, {'spec': Any}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfAttributes(SetOf):
|
||||
_child_spec = Attribute
|
||||
|
||||
|
||||
class SetOfExtensions(SetOf):
|
||||
_child_spec = Extensions
|
||||
|
||||
|
||||
class MicrosoftEnrollmentCSProvider(Sequence):
|
||||
_fields = [
|
||||
('keyspec', Integer),
|
||||
('cspname', BMPString), # cryptographic service provider name
|
||||
('signature', BitString),
|
||||
]
|
||||
|
||||
|
||||
class SetOfMicrosoftEnrollmentCSProvider(SetOf):
|
||||
_child_spec = MicrosoftEnrollmentCSProvider
|
||||
|
||||
|
||||
class MicrosoftRequestClientInfo(Sequence):
|
||||
_fields = [
|
||||
('clientid', Integer),
|
||||
('machinename', UTF8String),
|
||||
('username', UTF8String),
|
||||
('processname', UTF8String),
|
||||
]
|
||||
|
||||
|
||||
class SetOfMicrosoftRequestClientInfo(SetOf):
|
||||
_child_spec = MicrosoftRequestClientInfo
|
||||
|
||||
|
||||
class CRIAttribute(Sequence):
|
||||
_fields = [
|
||||
('type', CSRAttributeType),
|
||||
('values', Any),
|
||||
]
|
||||
|
||||
_oid_pair = ('type', 'values')
|
||||
_oid_specs = {
|
||||
'challenge_password': SetOfDirectoryString,
|
||||
'extended_certificate_attributes': SetOfAttributes,
|
||||
'extension_request': SetOfExtensions,
|
||||
'microsoft_enrollment_csp_provider': SetOfMicrosoftEnrollmentCSProvider,
|
||||
'microsoft_os_version': SetOfDirectoryString,
|
||||
'microsoft_request_client_info': SetOfMicrosoftRequestClientInfo,
|
||||
}
|
||||
|
||||
|
||||
class CRIAttributes(SetOf):
|
||||
_child_spec = CRIAttribute
|
||||
|
||||
|
||||
class CertificationRequestInfo(Sequence):
|
||||
_fields = [
|
||||
('version', Version),
|
||||
('subject', Name),
|
||||
('subject_pk_info', PublicKeyInfo),
|
||||
('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class CertificationRequest(Sequence):
|
||||
_fields = [
|
||||
('certification_request_info', CertificationRequestInfo),
|
||||
('signature_algorithm', SignedDigestAlgorithm),
|
||||
('signature', OctetBitString),
|
||||
]
|
File diff suppressed because it is too large
Load Diff
@ -1,703 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
ASN.1 type classes for the online certificate status protocol (OCSP). Exports
|
||||
the following items:
|
||||
|
||||
- OCSPRequest()
|
||||
- OCSPResponse()
|
||||
|
||||
Other type classes are defined that help compose the types listed above.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
from ._errors import unwrap
|
||||
from .algos import DigestAlgorithm, SignedDigestAlgorithm
|
||||
from .core import (
|
||||
Boolean,
|
||||
Choice,
|
||||
Enumerated,
|
||||
GeneralizedTime,
|
||||
IA5String,
|
||||
Integer,
|
||||
Null,
|
||||
ObjectIdentifier,
|
||||
OctetBitString,
|
||||
OctetString,
|
||||
ParsableOctetString,
|
||||
Sequence,
|
||||
SequenceOf,
|
||||
)
|
||||
from .crl import AuthorityInfoAccessSyntax, CRLReason
|
||||
from .keys import PublicKeyAlgorithm
|
||||
from .x509 import Certificate, GeneralName, GeneralNames, Name
|
||||
|
||||
|
||||
# The structures in this file are taken from https://tools.ietf.org/html/rfc6960
|
||||
|
||||
|
||||
class Version(Integer):
|
||||
_map = {
|
||||
0: 'v1'
|
||||
}
|
||||
|
||||
|
||||
class CertId(Sequence):
|
||||
_fields = [
|
||||
('hash_algorithm', DigestAlgorithm),
|
||||
('issuer_name_hash', OctetString),
|
||||
('issuer_key_hash', OctetString),
|
||||
('serial_number', Integer),
|
||||
]
|
||||
|
||||
|
||||
class ServiceLocator(Sequence):
|
||||
_fields = [
|
||||
('issuer', Name),
|
||||
('locator', AuthorityInfoAccessSyntax),
|
||||
]
|
||||
|
||||
|
||||
class RequestExtensionId(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.3.6.1.5.5.7.48.1.7': 'service_locator',
|
||||
}
|
||||
|
||||
|
||||
class RequestExtension(Sequence):
|
||||
_fields = [
|
||||
('extn_id', RequestExtensionId),
|
||||
('critical', Boolean, {'default': False}),
|
||||
('extn_value', ParsableOctetString),
|
||||
]
|
||||
|
||||
_oid_pair = ('extn_id', 'extn_value')
|
||||
_oid_specs = {
|
||||
'service_locator': ServiceLocator,
|
||||
}
|
||||
|
||||
|
||||
class RequestExtensions(SequenceOf):
|
||||
_child_spec = RequestExtension
|
||||
|
||||
|
||||
class Request(Sequence):
|
||||
_fields = [
|
||||
('req_cert', CertId),
|
||||
('single_request_extensions', RequestExtensions, {'explicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
_processed_extensions = False
|
||||
_critical_extensions = None
|
||||
_service_locator_value = None
|
||||
|
||||
def _set_extensions(self):
|
||||
"""
|
||||
Sets common named extensions to private attributes and creates a list
|
||||
of critical extensions
|
||||
"""
|
||||
|
||||
self._critical_extensions = set()
|
||||
|
||||
for extension in self['single_request_extensions']:
|
||||
name = extension['extn_id'].native
|
||||
attribute_name = '_%s_value' % name
|
||||
if hasattr(self, attribute_name):
|
||||
setattr(self, attribute_name, extension['extn_value'].parsed)
|
||||
if extension['critical'].native:
|
||||
self._critical_extensions.add(name)
|
||||
|
||||
self._processed_extensions = True
|
||||
|
||||
@property
|
||||
def critical_extensions(self):
|
||||
"""
|
||||
Returns a set of the names (or OID if not a known extension) of the
|
||||
extensions marked as critical
|
||||
|
||||
:return:
|
||||
A set of unicode strings
|
||||
"""
|
||||
|
||||
if not self._processed_extensions:
|
||||
self._set_extensions()
|
||||
return self._critical_extensions
|
||||
|
||||
@property
|
||||
def service_locator_value(self):
|
||||
"""
|
||||
This extension is used when communicating with an OCSP responder that
|
||||
acts as a proxy for OCSP requests
|
||||
|
||||
:return:
|
||||
None or a ServiceLocator object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._service_locator_value
|
||||
|
||||
|
||||
class Requests(SequenceOf):
|
||||
_child_spec = Request
|
||||
|
||||
|
||||
class ResponseType(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.3.6.1.5.5.7.48.1.1': 'basic_ocsp_response',
|
||||
}
|
||||
|
||||
|
||||
class AcceptableResponses(SequenceOf):
|
||||
_child_spec = ResponseType
|
||||
|
||||
|
||||
class PreferredSignatureAlgorithm(Sequence):
|
||||
_fields = [
|
||||
('sig_identifier', SignedDigestAlgorithm),
|
||||
('cert_identifier', PublicKeyAlgorithm, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class PreferredSignatureAlgorithms(SequenceOf):
|
||||
_child_spec = PreferredSignatureAlgorithm
|
||||
|
||||
|
||||
class TBSRequestExtensionId(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.3.6.1.5.5.7.48.1.2': 'nonce',
|
||||
'1.3.6.1.5.5.7.48.1.4': 'acceptable_responses',
|
||||
'1.3.6.1.5.5.7.48.1.8': 'preferred_signature_algorithms',
|
||||
}
|
||||
|
||||
|
||||
class TBSRequestExtension(Sequence):
|
||||
_fields = [
|
||||
('extn_id', TBSRequestExtensionId),
|
||||
('critical', Boolean, {'default': False}),
|
||||
('extn_value', ParsableOctetString),
|
||||
]
|
||||
|
||||
_oid_pair = ('extn_id', 'extn_value')
|
||||
_oid_specs = {
|
||||
'nonce': OctetString,
|
||||
'acceptable_responses': AcceptableResponses,
|
||||
'preferred_signature_algorithms': PreferredSignatureAlgorithms,
|
||||
}
|
||||
|
||||
|
||||
class TBSRequestExtensions(SequenceOf):
|
||||
_child_spec = TBSRequestExtension
|
||||
|
||||
|
||||
class TBSRequest(Sequence):
|
||||
_fields = [
|
||||
('version', Version, {'explicit': 0, 'default': 'v1'}),
|
||||
('requestor_name', GeneralName, {'explicit': 1, 'optional': True}),
|
||||
('request_list', Requests),
|
||||
('request_extensions', TBSRequestExtensions, {'explicit': 2, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class Certificates(SequenceOf):
|
||||
_child_spec = Certificate
|
||||
|
||||
|
||||
class Signature(Sequence):
|
||||
_fields = [
|
||||
('signature_algorithm', SignedDigestAlgorithm),
|
||||
('signature', OctetBitString),
|
||||
('certs', Certificates, {'explicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class OCSPRequest(Sequence):
|
||||
_fields = [
|
||||
('tbs_request', TBSRequest),
|
||||
('optional_signature', Signature, {'explicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
_processed_extensions = False
|
||||
_critical_extensions = None
|
||||
_nonce_value = None
|
||||
_acceptable_responses_value = None
|
||||
_preferred_signature_algorithms_value = None
|
||||
|
||||
def _set_extensions(self):
|
||||
"""
|
||||
Sets common named extensions to private attributes and creates a list
|
||||
of critical extensions
|
||||
"""
|
||||
|
||||
self._critical_extensions = set()
|
||||
|
||||
for extension in self['tbs_request']['request_extensions']:
|
||||
name = extension['extn_id'].native
|
||||
attribute_name = '_%s_value' % name
|
||||
if hasattr(self, attribute_name):
|
||||
setattr(self, attribute_name, extension['extn_value'].parsed)
|
||||
if extension['critical'].native:
|
||||
self._critical_extensions.add(name)
|
||||
|
||||
self._processed_extensions = True
|
||||
|
||||
@property
|
||||
def critical_extensions(self):
|
||||
"""
|
||||
Returns a set of the names (or OID if not a known extension) of the
|
||||
extensions marked as critical
|
||||
|
||||
:return:
|
||||
A set of unicode strings
|
||||
"""
|
||||
|
||||
if not self._processed_extensions:
|
||||
self._set_extensions()
|
||||
return self._critical_extensions
|
||||
|
||||
@property
|
||||
def nonce_value(self):
|
||||
"""
|
||||
This extension is used to prevent replay attacks by including a unique,
|
||||
random value with each request/response pair
|
||||
|
||||
:return:
|
||||
None or an OctetString object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._nonce_value
|
||||
|
||||
@property
|
||||
def acceptable_responses_value(self):
|
||||
"""
|
||||
This extension is used to allow the client and server to communicate
|
||||
with alternative response formats other than just basic_ocsp_response,
|
||||
although no other formats are defined in the standard.
|
||||
|
||||
:return:
|
||||
None or an AcceptableResponses object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._acceptable_responses_value
|
||||
|
||||
@property
|
||||
def preferred_signature_algorithms_value(self):
|
||||
"""
|
||||
This extension is used by the client to define what signature algorithms
|
||||
are preferred, including both the hash algorithm and the public key
|
||||
algorithm, with a level of detail down to even the public key algorithm
|
||||
parameters, such as curve name.
|
||||
|
||||
:return:
|
||||
None or a PreferredSignatureAlgorithms object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._preferred_signature_algorithms_value
|
||||
|
||||
|
||||
class OCSPResponseStatus(Enumerated):
|
||||
_map = {
|
||||
0: 'successful',
|
||||
1: 'malformed_request',
|
||||
2: 'internal_error',
|
||||
3: 'try_later',
|
||||
5: 'sign_required',
|
||||
6: 'unauthorized',
|
||||
}
|
||||
|
||||
|
||||
class ResponderId(Choice):
|
||||
_alternatives = [
|
||||
('by_name', Name, {'explicit': 1}),
|
||||
('by_key', OctetString, {'explicit': 2}),
|
||||
]
|
||||
|
||||
|
||||
# Custom class to return a meaningful .native attribute from CertStatus()
|
||||
class StatusGood(Null):
|
||||
def set(self, value):
|
||||
"""
|
||||
Sets the value of the object
|
||||
|
||||
:param value:
|
||||
None or 'good'
|
||||
"""
|
||||
|
||||
if value is not None and value != 'good' and not isinstance(value, Null):
|
||||
raise ValueError(unwrap(
|
||||
'''
|
||||
value must be one of None, "good", not %s
|
||||
''',
|
||||
repr(value)
|
||||
))
|
||||
|
||||
self.contents = b''
|
||||
|
||||
@property
|
||||
def native(self):
|
||||
return 'good'
|
||||
|
||||
|
||||
# Custom class to return a meaningful .native attribute from CertStatus()
|
||||
class StatusUnknown(Null):
|
||||
def set(self, value):
|
||||
"""
|
||||
Sets the value of the object
|
||||
|
||||
:param value:
|
||||
None or 'unknown'
|
||||
"""
|
||||
|
||||
if value is not None and value != 'unknown' and not isinstance(value, Null):
|
||||
raise ValueError(unwrap(
|
||||
'''
|
||||
value must be one of None, "unknown", not %s
|
||||
''',
|
||||
repr(value)
|
||||
))
|
||||
|
||||
self.contents = b''
|
||||
|
||||
@property
|
||||
def native(self):
|
||||
return 'unknown'
|
||||
|
||||
|
||||
class RevokedInfo(Sequence):
|
||||
_fields = [
|
||||
('revocation_time', GeneralizedTime),
|
||||
('revocation_reason', CRLReason, {'explicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class CertStatus(Choice):
|
||||
_alternatives = [
|
||||
('good', StatusGood, {'implicit': 0}),
|
||||
('revoked', RevokedInfo, {'implicit': 1}),
|
||||
('unknown', StatusUnknown, {'implicit': 2}),
|
||||
]
|
||||
|
||||
|
||||
class CrlId(Sequence):
|
||||
_fields = [
|
||||
('crl_url', IA5String, {'explicit': 0, 'optional': True}),
|
||||
('crl_num', Integer, {'explicit': 1, 'optional': True}),
|
||||
('crl_time', GeneralizedTime, {'explicit': 2, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SingleResponseExtensionId(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.3.6.1.5.5.7.48.1.3': 'crl',
|
||||
'1.3.6.1.5.5.7.48.1.6': 'archive_cutoff',
|
||||
# These are CRLEntryExtension values from
|
||||
# https://tools.ietf.org/html/rfc5280
|
||||
'2.5.29.21': 'crl_reason',
|
||||
'2.5.29.24': 'invalidity_date',
|
||||
'2.5.29.29': 'certificate_issuer',
|
||||
# https://tools.ietf.org/html/rfc6962.html#page-13
|
||||
'1.3.6.1.4.1.11129.2.4.5': 'signed_certificate_timestamp_list',
|
||||
}
|
||||
|
||||
|
||||
class SingleResponseExtension(Sequence):
|
||||
_fields = [
|
||||
('extn_id', SingleResponseExtensionId),
|
||||
('critical', Boolean, {'default': False}),
|
||||
('extn_value', ParsableOctetString),
|
||||
]
|
||||
|
||||
_oid_pair = ('extn_id', 'extn_value')
|
||||
_oid_specs = {
|
||||
'crl': CrlId,
|
||||
'archive_cutoff': GeneralizedTime,
|
||||
'crl_reason': CRLReason,
|
||||
'invalidity_date': GeneralizedTime,
|
||||
'certificate_issuer': GeneralNames,
|
||||
'signed_certificate_timestamp_list': OctetString,
|
||||
}
|
||||
|
||||
|
||||
class SingleResponseExtensions(SequenceOf):
|
||||
_child_spec = SingleResponseExtension
|
||||
|
||||
|
||||
class SingleResponse(Sequence):
|
||||
_fields = [
|
||||
('cert_id', CertId),
|
||||
('cert_status', CertStatus),
|
||||
('this_update', GeneralizedTime),
|
||||
('next_update', GeneralizedTime, {'explicit': 0, 'optional': True}),
|
||||
('single_extensions', SingleResponseExtensions, {'explicit': 1, 'optional': True}),
|
||||
]
|
||||
|
||||
_processed_extensions = False
|
||||
_critical_extensions = None
|
||||
_crl_value = None
|
||||
_archive_cutoff_value = None
|
||||
_crl_reason_value = None
|
||||
_invalidity_date_value = None
|
||||
_certificate_issuer_value = None
|
||||
|
||||
def _set_extensions(self):
|
||||
"""
|
||||
Sets common named extensions to private attributes and creates a list
|
||||
of critical extensions
|
||||
"""
|
||||
|
||||
self._critical_extensions = set()
|
||||
|
||||
for extension in self['single_extensions']:
|
||||
name = extension['extn_id'].native
|
||||
attribute_name = '_%s_value' % name
|
||||
if hasattr(self, attribute_name):
|
||||
setattr(self, attribute_name, extension['extn_value'].parsed)
|
||||
if extension['critical'].native:
|
||||
self._critical_extensions.add(name)
|
||||
|
||||
self._processed_extensions = True
|
||||
|
||||
@property
|
||||
def critical_extensions(self):
|
||||
"""
|
||||
Returns a set of the names (or OID if not a known extension) of the
|
||||
extensions marked as critical
|
||||
|
||||
:return:
|
||||
A set of unicode strings
|
||||
"""
|
||||
|
||||
if not self._processed_extensions:
|
||||
self._set_extensions()
|
||||
return self._critical_extensions
|
||||
|
||||
@property
|
||||
def crl_value(self):
|
||||
"""
|
||||
This extension is used to locate the CRL that a certificate's revocation
|
||||
is contained within.
|
||||
|
||||
:return:
|
||||
None or a CrlId object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._crl_value
|
||||
|
||||
@property
|
||||
def archive_cutoff_value(self):
|
||||
"""
|
||||
This extension is used to indicate the date at which an archived
|
||||
(historical) certificate status entry will no longer be available.
|
||||
|
||||
:return:
|
||||
None or a GeneralizedTime object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._archive_cutoff_value
|
||||
|
||||
@property
|
||||
def crl_reason_value(self):
|
||||
"""
|
||||
This extension indicates the reason that a certificate was revoked.
|
||||
|
||||
:return:
|
||||
None or a CRLReason object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._crl_reason_value
|
||||
|
||||
@property
|
||||
def invalidity_date_value(self):
|
||||
"""
|
||||
This extension indicates the suspected date/time the private key was
|
||||
compromised or the certificate became invalid. This would usually be
|
||||
before the revocation date, which is when the CA processed the
|
||||
revocation.
|
||||
|
||||
:return:
|
||||
None or a GeneralizedTime object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._invalidity_date_value
|
||||
|
||||
@property
|
||||
def certificate_issuer_value(self):
|
||||
"""
|
||||
This extension indicates the issuer of the certificate in question.
|
||||
|
||||
:return:
|
||||
None or an x509.GeneralNames object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._certificate_issuer_value
|
||||
|
||||
|
||||
class Responses(SequenceOf):
|
||||
_child_spec = SingleResponse
|
||||
|
||||
|
||||
class ResponseDataExtensionId(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.3.6.1.5.5.7.48.1.2': 'nonce',
|
||||
'1.3.6.1.5.5.7.48.1.9': 'extended_revoke',
|
||||
}
|
||||
|
||||
|
||||
class ResponseDataExtension(Sequence):
|
||||
_fields = [
|
||||
('extn_id', ResponseDataExtensionId),
|
||||
('critical', Boolean, {'default': False}),
|
||||
('extn_value', ParsableOctetString),
|
||||
]
|
||||
|
||||
_oid_pair = ('extn_id', 'extn_value')
|
||||
_oid_specs = {
|
||||
'nonce': OctetString,
|
||||
'extended_revoke': Null,
|
||||
}
|
||||
|
||||
|
||||
class ResponseDataExtensions(SequenceOf):
|
||||
_child_spec = ResponseDataExtension
|
||||
|
||||
|
||||
class ResponseData(Sequence):
|
||||
_fields = [
|
||||
('version', Version, {'explicit': 0, 'default': 'v1'}),
|
||||
('responder_id', ResponderId),
|
||||
('produced_at', GeneralizedTime),
|
||||
('responses', Responses),
|
||||
('response_extensions', ResponseDataExtensions, {'explicit': 1, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class BasicOCSPResponse(Sequence):
|
||||
_fields = [
|
||||
('tbs_response_data', ResponseData),
|
||||
('signature_algorithm', SignedDigestAlgorithm),
|
||||
('signature', OctetBitString),
|
||||
('certs', Certificates, {'explicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class ResponseBytes(Sequence):
|
||||
_fields = [
|
||||
('response_type', ResponseType),
|
||||
('response', ParsableOctetString),
|
||||
]
|
||||
|
||||
_oid_pair = ('response_type', 'response')
|
||||
_oid_specs = {
|
||||
'basic_ocsp_response': BasicOCSPResponse,
|
||||
}
|
||||
|
||||
|
||||
class OCSPResponse(Sequence):
|
||||
_fields = [
|
||||
('response_status', OCSPResponseStatus),
|
||||
('response_bytes', ResponseBytes, {'explicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
_processed_extensions = False
|
||||
_critical_extensions = None
|
||||
_nonce_value = None
|
||||
_extended_revoke_value = None
|
||||
|
||||
def _set_extensions(self):
|
||||
"""
|
||||
Sets common named extensions to private attributes and creates a list
|
||||
of critical extensions
|
||||
"""
|
||||
|
||||
self._critical_extensions = set()
|
||||
|
||||
for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']:
|
||||
name = extension['extn_id'].native
|
||||
attribute_name = '_%s_value' % name
|
||||
if hasattr(self, attribute_name):
|
||||
setattr(self, attribute_name, extension['extn_value'].parsed)
|
||||
if extension['critical'].native:
|
||||
self._critical_extensions.add(name)
|
||||
|
||||
self._processed_extensions = True
|
||||
|
||||
@property
|
||||
def critical_extensions(self):
|
||||
"""
|
||||
Returns a set of the names (or OID if not a known extension) of the
|
||||
extensions marked as critical
|
||||
|
||||
:return:
|
||||
A set of unicode strings
|
||||
"""
|
||||
|
||||
if not self._processed_extensions:
|
||||
self._set_extensions()
|
||||
return self._critical_extensions
|
||||
|
||||
@property
|
||||
def nonce_value(self):
|
||||
"""
|
||||
This extension is used to prevent replay attacks on the request/response
|
||||
exchange
|
||||
|
||||
:return:
|
||||
None or an OctetString object
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._nonce_value
|
||||
|
||||
@property
|
||||
def extended_revoke_value(self):
|
||||
"""
|
||||
This extension is used to signal that the responder will return a
|
||||
"revoked" status for non-issued certificates.
|
||||
|
||||
:return:
|
||||
None or a Null object (if present)
|
||||
"""
|
||||
|
||||
if self._processed_extensions is False:
|
||||
self._set_extensions()
|
||||
return self._extended_revoke_value
|
||||
|
||||
@property
|
||||
def basic_ocsp_response(self):
|
||||
"""
|
||||
A shortcut into the BasicOCSPResponse sequence
|
||||
|
||||
:return:
|
||||
None or an asn1crypto.ocsp.BasicOCSPResponse object
|
||||
"""
|
||||
|
||||
return self['response_bytes']['response'].parsed
|
||||
|
||||
@property
|
||||
def response_data(self):
|
||||
"""
|
||||
A shortcut into the parsed, ResponseData sequence
|
||||
|
||||
:return:
|
||||
None or an asn1crypto.ocsp.ResponseData object
|
||||
"""
|
||||
|
||||
return self['response_bytes']['response'].parsed['tbs_response_data']
|
@ -1,292 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Functions for parsing and dumping using the ASN.1 DER encoding. Exports the
|
||||
following items:
|
||||
|
||||
- emit()
|
||||
- parse()
|
||||
- peek()
|
||||
|
||||
Other type classes are defined that help compose the types listed above.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
|
||||
from ._types import byte_cls, chr_cls, type_name
|
||||
from .util import int_from_bytes, int_to_bytes
|
||||
|
||||
_PY2 = sys.version_info <= (3,)
|
||||
_INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available'
|
||||
_MAX_DEPTH = 10
|
||||
|
||||
|
||||
def emit(class_, method, tag, contents):
|
||||
"""
|
||||
Constructs a byte string of an ASN.1 DER-encoded value
|
||||
|
||||
This is typically not useful. Instead, use one of the standard classes from
|
||||
asn1crypto.core, or construct a new class with specific fields, and call the
|
||||
.dump() method.
|
||||
|
||||
:param class_:
|
||||
An integer ASN.1 class value: 0 (universal), 1 (application),
|
||||
2 (context), 3 (private)
|
||||
|
||||
:param method:
|
||||
An integer ASN.1 method value: 0 (primitive), 1 (constructed)
|
||||
|
||||
:param tag:
|
||||
An integer ASN.1 tag value
|
||||
|
||||
:param contents:
|
||||
A byte string of the encoded byte contents
|
||||
|
||||
:return:
|
||||
A byte string of the ASN.1 DER value (header and contents)
|
||||
"""
|
||||
|
||||
if not isinstance(class_, int):
|
||||
raise TypeError('class_ must be an integer, not %s' % type_name(class_))
|
||||
|
||||
if class_ < 0 or class_ > 3:
|
||||
raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_)
|
||||
|
||||
if not isinstance(method, int):
|
||||
raise TypeError('method must be an integer, not %s' % type_name(method))
|
||||
|
||||
if method < 0 or method > 1:
|
||||
raise ValueError('method must be 0 or 1, not %s' % method)
|
||||
|
||||
if not isinstance(tag, int):
|
||||
raise TypeError('tag must be an integer, not %s' % type_name(tag))
|
||||
|
||||
if tag < 0:
|
||||
raise ValueError('tag must be greater than zero, not %s' % tag)
|
||||
|
||||
if not isinstance(contents, byte_cls):
|
||||
raise TypeError('contents must be a byte string, not %s' % type_name(contents))
|
||||
|
||||
return _dump_header(class_, method, tag, contents) + contents
|
||||
|
||||
|
||||
def parse(contents, strict=False):
|
||||
"""
|
||||
Parses a byte string of ASN.1 BER/DER-encoded data.
|
||||
|
||||
This is typically not useful. Instead, use one of the standard classes from
|
||||
asn1crypto.core, or construct a new class with specific fields, and call the
|
||||
.load() class method.
|
||||
|
||||
:param contents:
|
||||
A byte string of BER/DER-encoded data
|
||||
|
||||
:param strict:
|
||||
A boolean indicating if trailing data should be forbidden - if so, a
|
||||
ValueError will be raised when trailing data exists
|
||||
|
||||
:raises:
|
||||
ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
|
||||
TypeError - when contents is not a byte string
|
||||
|
||||
:return:
|
||||
A 6-element tuple:
|
||||
- 0: integer class (0 to 3)
|
||||
- 1: integer method
|
||||
- 2: integer tag
|
||||
- 3: byte string header
|
||||
- 4: byte string content
|
||||
- 5: byte string trailer
|
||||
"""
|
||||
|
||||
if not isinstance(contents, byte_cls):
|
||||
raise TypeError('contents must be a byte string, not %s' % type_name(contents))
|
||||
|
||||
contents_len = len(contents)
|
||||
info, consumed = _parse(contents, contents_len)
|
||||
if strict and consumed != contents_len:
|
||||
raise ValueError('Extra data - %d bytes of trailing data were provided' % (contents_len - consumed))
|
||||
return info
|
||||
|
||||
|
||||
def peek(contents):
|
||||
"""
|
||||
Parses a byte string of ASN.1 BER/DER-encoded data to find the length
|
||||
|
||||
This is typically used to look into an encoded value to see how long the
|
||||
next chunk of ASN.1-encoded data is. Primarily it is useful when a
|
||||
value is a concatenation of multiple values.
|
||||
|
||||
:param contents:
|
||||
A byte string of BER/DER-encoded data
|
||||
|
||||
:raises:
|
||||
ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
|
||||
TypeError - when contents is not a byte string
|
||||
|
||||
:return:
|
||||
An integer with the number of bytes occupied by the ASN.1 value
|
||||
"""
|
||||
|
||||
if not isinstance(contents, byte_cls):
|
||||
raise TypeError('contents must be a byte string, not %s' % type_name(contents))
|
||||
|
||||
info, consumed = _parse(contents, len(contents))
|
||||
return consumed
|
||||
|
||||
|
||||
def _parse(encoded_data, data_len, pointer=0, lengths_only=False, depth=0):
|
||||
"""
|
||||
Parses a byte string into component parts
|
||||
|
||||
:param encoded_data:
|
||||
A byte string that contains BER-encoded data
|
||||
|
||||
:param data_len:
|
||||
The integer length of the encoded data
|
||||
|
||||
:param pointer:
|
||||
The index in the byte string to parse from
|
||||
|
||||
:param lengths_only:
|
||||
A boolean to cause the call to return a 2-element tuple of the integer
|
||||
number of bytes in the header and the integer number of bytes in the
|
||||
contents. Internal use only.
|
||||
|
||||
:param depth:
|
||||
The recursion depth when evaluating indefinite-length encoding.
|
||||
|
||||
:return:
|
||||
A 2-element tuple:
|
||||
- 0: A tuple of (class_, method, tag, header, content, trailer)
|
||||
- 1: An integer indicating how many bytes were consumed
|
||||
"""
|
||||
|
||||
if depth > _MAX_DEPTH:
|
||||
raise ValueError('Indefinite-length recursion limit exceeded')
|
||||
|
||||
start = pointer
|
||||
|
||||
if data_len < pointer + 1:
|
||||
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
|
||||
first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
|
||||
|
||||
pointer += 1
|
||||
|
||||
tag = first_octet & 31
|
||||
constructed = (first_octet >> 5) & 1
|
||||
# Base 128 length using 8th bit as continuation indicator
|
||||
if tag == 31:
|
||||
tag = 0
|
||||
while True:
|
||||
if data_len < pointer + 1:
|
||||
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
|
||||
num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
|
||||
pointer += 1
|
||||
if num == 0x80 and tag == 0:
|
||||
raise ValueError('Non-minimal tag encoding')
|
||||
tag *= 128
|
||||
tag += num & 127
|
||||
if num >> 7 == 0:
|
||||
break
|
||||
if tag < 31:
|
||||
raise ValueError('Non-minimal tag encoding')
|
||||
|
||||
if data_len < pointer + 1:
|
||||
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
|
||||
length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
|
||||
pointer += 1
|
||||
trailer = b''
|
||||
|
||||
if length_octet >> 7 == 0:
|
||||
contents_end = pointer + (length_octet & 127)
|
||||
|
||||
else:
|
||||
length_octets = length_octet & 127
|
||||
if length_octets:
|
||||
if data_len < pointer + length_octets:
|
||||
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (length_octets, data_len - pointer))
|
||||
pointer += length_octets
|
||||
contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False)
|
||||
|
||||
else:
|
||||
# To properly parse indefinite length values, we need to scan forward
|
||||
# parsing headers until we find a value with a length of zero. If we
|
||||
# just scanned looking for \x00\x00, nested indefinite length values
|
||||
# would not work.
|
||||
if not constructed:
|
||||
raise ValueError('Indefinite-length element must be constructed')
|
||||
contents_end = pointer
|
||||
while data_len < contents_end + 2 or encoded_data[contents_end:contents_end+2] != b'\x00\x00':
|
||||
_, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True, depth=depth+1)
|
||||
contents_end += 2
|
||||
trailer = b'\x00\x00'
|
||||
|
||||
if contents_end > data_len:
|
||||
raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end - pointer, data_len - pointer))
|
||||
|
||||
if lengths_only:
|
||||
return (pointer, contents_end)
|
||||
|
||||
return (
|
||||
(
|
||||
first_octet >> 6,
|
||||
constructed,
|
||||
tag,
|
||||
encoded_data[start:pointer],
|
||||
encoded_data[pointer:contents_end-len(trailer)],
|
||||
trailer
|
||||
),
|
||||
contents_end
|
||||
)
|
||||
|
||||
|
||||
def _dump_header(class_, method, tag, contents):
    """
    Constructs the identifier and length octets (the header) for a
    DER-encoded ASN.1 value

    :param class_:
        An integer ASN.1 class value: 0 (universal), 1 (application),
        2 (context), 3 (private)

    :param method:
        An integer ASN.1 method value: 0 (primitive), 1 (constructed)

    :param tag:
        An integer ASN.1 tag value

    :param contents:
        A byte string of the encoded byte contents

    :return:
        A byte string of the ASN.1 DER header
    """

    first_octet = (class_ << 6) | (method << 5)

    if tag < 31:
        # Low tag numbers fit directly into the identifier octet
        identifier = chr_cls(first_octet | tag)
    else:
        # High tag numbers use the long form: the identifier octet carries
        # the value 31 and the tag follows in base-128 octets, with the high
        # bit set on every octet except the last
        tag_octets = chr_cls(tag & 0x7f)
        tag = tag >> 7
        while tag > 0:
            tag_octets = chr_cls(0x80 | (tag & 0x7f)) + tag_octets
            tag = tag >> 7
        identifier = chr_cls(first_octet | 31) + tag_octets

    content_length = len(contents)
    if content_length <= 127:
        # Short-form length: a single octet holds the length
        length_octets = chr_cls(content_length)
    else:
        # Long-form length: 0x80 | octet-count, then the big-endian length
        encoded_length = int_to_bytes(content_length)
        length_octets = chr_cls(0x80 | len(encoded_length)) + encoded_length

    return identifier + length_octets
|
@ -1,84 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
ASN.1 type classes for PDF signature structures. Adds extra oid mapping and
|
||||
value parsing to asn1crypto.x509.Extension() and asn1crypto.xms.CMSAttribute().
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
from .cms import CMSAttributeType, CMSAttribute
|
||||
from .core import (
|
||||
Boolean,
|
||||
Integer,
|
||||
Null,
|
||||
ObjectIdentifier,
|
||||
OctetString,
|
||||
Sequence,
|
||||
SequenceOf,
|
||||
SetOf,
|
||||
)
|
||||
from .crl import CertificateList
|
||||
from .ocsp import OCSPResponse
|
||||
from .x509 import (
|
||||
Extension,
|
||||
ExtensionId,
|
||||
GeneralName,
|
||||
KeyPurposeId,
|
||||
)
|
||||
|
||||
|
||||
class AdobeArchiveRevInfo(Sequence):
|
||||
_fields = [
|
||||
('version', Integer)
|
||||
]
|
||||
|
||||
|
||||
class AdobeTimestamp(Sequence):
|
||||
_fields = [
|
||||
('version', Integer),
|
||||
('location', GeneralName),
|
||||
('requires_auth', Boolean, {'optional': True, 'default': False}),
|
||||
]
|
||||
|
||||
|
||||
class OtherRevInfo(Sequence):
|
||||
_fields = [
|
||||
('type', ObjectIdentifier),
|
||||
('value', OctetString),
|
||||
]
|
||||
|
||||
|
||||
class SequenceOfCertificateList(SequenceOf):
|
||||
_child_spec = CertificateList
|
||||
|
||||
|
||||
class SequenceOfOCSPResponse(SequenceOf):
|
||||
_child_spec = OCSPResponse
|
||||
|
||||
|
||||
class SequenceOfOtherRevInfo(SequenceOf):
|
||||
_child_spec = OtherRevInfo
|
||||
|
||||
|
||||
class RevocationInfoArchival(Sequence):
|
||||
_fields = [
|
||||
('crl', SequenceOfCertificateList, {'explicit': 0, 'optional': True}),
|
||||
('ocsp', SequenceOfOCSPResponse, {'explicit': 1, 'optional': True}),
|
||||
('other_rev_info', SequenceOfOtherRevInfo, {'explicit': 2, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfRevocationInfoArchival(SetOf):
|
||||
_child_spec = RevocationInfoArchival
|
||||
|
||||
|
||||
ExtensionId._map['1.2.840.113583.1.1.9.2'] = 'adobe_archive_rev_info'
|
||||
ExtensionId._map['1.2.840.113583.1.1.9.1'] = 'adobe_timestamp'
|
||||
ExtensionId._map['1.2.840.113583.1.1.10'] = 'adobe_ppklite_credential'
|
||||
Extension._oid_specs['adobe_archive_rev_info'] = AdobeArchiveRevInfo
|
||||
Extension._oid_specs['adobe_timestamp'] = AdobeTimestamp
|
||||
Extension._oid_specs['adobe_ppklite_credential'] = Null
|
||||
KeyPurposeId._map['1.2.840.113583.1.1.5'] = 'pdf_signing'
|
||||
CMSAttributeType._map['1.2.840.113583.1.1.8'] = 'adobe_revocation_info_archival'
|
||||
CMSAttribute._oid_specs['adobe_revocation_info_archival'] = SetOfRevocationInfoArchival
|
@ -1,222 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Encoding DER to PEM and decoding PEM to DER. Exports the following items:
|
||||
|
||||
- armor()
|
||||
- detect()
|
||||
- unarmor()
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import base64
|
||||
import re
|
||||
import sys
|
||||
|
||||
from ._errors import unwrap
|
||||
from ._types import type_name as _type_name, str_cls, byte_cls
|
||||
|
||||
if sys.version_info < (3,):
|
||||
from cStringIO import StringIO as BytesIO
|
||||
else:
|
||||
from io import BytesIO
|
||||
|
||||
|
||||
def detect(byte_string):
    """
    Detect if a byte string seems to contain a PEM-encoded block

    :param byte_string:
        A byte string to look through

    :return:
        A boolean, indicating if a PEM-encoded block is contained in the byte
        string
    """

    if not isinstance(byte_string, byte_cls):
        raise TypeError(unwrap(
            '''
            byte_string must be a byte string, not %s
            ''',
            _type_name(byte_string)
        ))

    # Both the modern "-----BEGIN" marker and the older RFC 1421-style
    # "---- BEGIN" marker count as PEM content
    return b'-----BEGIN' in byte_string or b'---- BEGIN' in byte_string
|
||||
|
||||
|
||||
def armor(type_name, der_bytes, headers=None):
    """
    Armors a DER-encoded byte string in PEM

    :param type_name:
        A unicode string that will be capitalized and placed in the header
        and footer of the block. E.g. "CERTIFICATE", "PRIVATE KEY", etc. This
        will appear as "-----BEGIN CERTIFICATE-----" and
        "-----END CERTIFICATE-----".

    :param der_bytes:
        A byte string to be armored

    :param headers:
        An OrderedDict of the header lines to write after the BEGIN line

    :return:
        A byte string of the PEM block
    """

    if not isinstance(der_bytes, byte_cls):
        # Consistency fix: pass the type name as a formatting parameter to
        # unwrap(), like every other unwrap() call in this module, instead of
        # %-formatting the message inline
        raise TypeError(unwrap(
            '''
            der_bytes must be a byte string, not %s
            ''',
            _type_name(der_bytes)
        ))

    if not isinstance(type_name, str_cls):
        raise TypeError(unwrap(
            '''
            type_name must be a unicode string, not %s
            ''',
            _type_name(type_name)
        ))

    type_name = type_name.upper().encode('ascii')

    output = BytesIO()
    output.write(b'-----BEGIN ')
    output.write(type_name)
    output.write(b'-----\n')
    if headers:
        for key in headers:
            output.write(key.encode('ascii'))
            output.write(b': ')
            output.write(headers[key].encode('ascii'))
            output.write(b'\n')
        # Blank line separates the headers from the base64 payload
        output.write(b'\n')
    b64_bytes = base64.b64encode(der_bytes)
    b64_len = len(b64_bytes)
    i = 0
    # Wrap the base64 payload at 64 characters per line
    while i < b64_len:
        output.write(b64_bytes[i:i + 64])
        output.write(b'\n')
        i += 64
    output.write(b'-----END ')
    output.write(type_name)
    output.write(b'-----\n')

    return output.getvalue()
|
||||
|
||||
|
||||
def _unarmor(pem_bytes):
    """
    Convert a PEM-encoded byte string into one or more DER-encoded byte strings

    :param pem_bytes:
        A byte string of the PEM-encoded data

    :raises:
        ValueError - when the pem_bytes do not appear to be PEM-encoded bytes

    :return:
        A generator of 3-element tuples in the format: (object_type, headers,
        der_bytes). The object_type is a unicode string of what is between
        "-----BEGIN " and "-----". Examples include: "CERTIFICATE",
        "PUBLIC KEY", "PRIVATE KEY". The headers is a dict containing any lines
        in the form "Name: Value" that are right after the begin line.
    """

    if not isinstance(pem_bytes, byte_cls):
        raise TypeError(unwrap(
            '''
            pem_bytes must be a byte string, not %s
            ''',
            _type_name(pem_bytes)
        ))

    # Valid states include: "trash", "headers", "body"
    state = 'trash'
    headers = {}
    base64_data = b''
    object_type = None

    # Track that at least one complete BEGIN/END pair was seen
    found_start = False
    found_end = False

    for line in pem_bytes.splitlines(False):
        if line == b'':
            continue

        if state == "trash":
            # Look for a starting line since some CA cert bundle show the cert
            # into in a parsed format above each PEM block
            type_name_match = re.match(b'^(?:---- |-----)BEGIN ([A-Z0-9 ]+)(?: ----|-----)', line)
            if not type_name_match:
                continue
            object_type = type_name_match.group(1).decode('ascii')

            found_start = True
            state = 'headers'
            continue

        if state == 'headers':
            if line.find(b':') == -1:
                # First line without a colon starts the base64 body; note
                # there is deliberately no "continue" here, so this same line
                # falls through and is processed by the 'body' branch below
                state = 'body'
            else:
                decoded_line = line.decode('ascii')
                name, value = decoded_line.split(':', 1)
                headers[name] = value.strip()
                continue

        if state == 'body':
            if line[0:5] in (b'-----', b'---- '):
                # END marker: decode the accumulated base64 payload, yield
                # this block, then reset state for any following block
                der_bytes = base64.b64decode(base64_data)

                yield (object_type, headers, der_bytes)

                state = 'trash'
                headers = {}
                base64_data = b''
                object_type = None
                found_end = True
                continue

            base64_data += line

    if not found_start or not found_end:
        raise ValueError(unwrap(
            '''
            pem_bytes does not appear to contain PEM-encoded data - no
            BEGIN/END combination found
            '''
        ))
|
||||
|
||||
|
||||
def unarmor(pem_bytes, multiple=False):
    """
    Convert a PEM-encoded byte string into a DER-encoded byte string

    :param pem_bytes:
        A byte string of the PEM-encoded data

    :param multiple:
        If True, function will return a generator

    :raises:
        ValueError - when the pem_bytes do not appear to be PEM-encoded bytes

    :return:
        A 3-element tuple (object_name, headers, der_bytes). The object_name is
        a unicode string of what is between "-----BEGIN " and "-----". Examples
        include: "CERTIFICATE", "PUBLIC KEY", "PRIVATE KEY". The headers is a
        dict containing any lines in the form "Name: Value" that are right
        after the begin line.
    """

    blocks = _unarmor(pem_bytes)

    if multiple:
        return blocks

    # Single-block mode: pull the first (and presumably only) block out of
    # the generator
    return next(blocks)
|
@ -1,193 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
ASN.1 type classes for PKCS#12 files. Exports the following items:
|
||||
|
||||
- CertBag()
|
||||
- CrlBag()
|
||||
- Pfx()
|
||||
- SafeBag()
|
||||
- SecretBag()
|
||||
|
||||
Other type classes are defined that help compose the types listed above.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
from .algos import DigestInfo
|
||||
from .cms import ContentInfo, SignedData
|
||||
from .core import (
|
||||
Any,
|
||||
BMPString,
|
||||
Integer,
|
||||
ObjectIdentifier,
|
||||
OctetString,
|
||||
ParsableOctetString,
|
||||
Sequence,
|
||||
SequenceOf,
|
||||
SetOf,
|
||||
)
|
||||
from .keys import PrivateKeyInfo, EncryptedPrivateKeyInfo
|
||||
from .x509 import Certificate, KeyPurposeId
|
||||
|
||||
|
||||
# The structures in this file are taken from https://tools.ietf.org/html/rfc7292
|
||||
|
||||
class MacData(Sequence):
|
||||
_fields = [
|
||||
('mac', DigestInfo),
|
||||
('mac_salt', OctetString),
|
||||
('iterations', Integer, {'default': 1}),
|
||||
]
|
||||
|
||||
|
||||
class Version(Integer):
|
||||
_map = {
|
||||
3: 'v3'
|
||||
}
|
||||
|
||||
|
||||
class AttributeType(ObjectIdentifier):
|
||||
_map = {
|
||||
# https://tools.ietf.org/html/rfc2985#page-18
|
||||
'1.2.840.113549.1.9.20': 'friendly_name',
|
||||
'1.2.840.113549.1.9.21': 'local_key_id',
|
||||
# https://support.microsoft.com/en-us/kb/287547
|
||||
'1.3.6.1.4.1.311.17.1': 'microsoft_local_machine_keyset',
|
||||
# https://github.com/frohoff/jdk8u-dev-jdk/blob/master/src/share/classes/sun/security/pkcs12/PKCS12KeyStore.java
|
||||
# this is a set of OIDs, representing key usage, the usual value is a SET of one element OID 2.5.29.37.0
|
||||
'2.16.840.1.113894.746875.1.1': 'trusted_key_usage',
|
||||
}
|
||||
|
||||
|
||||
class SetOfAny(SetOf):
|
||||
_child_spec = Any
|
||||
|
||||
|
||||
class SetOfBMPString(SetOf):
|
||||
_child_spec = BMPString
|
||||
|
||||
|
||||
class SetOfOctetString(SetOf):
|
||||
_child_spec = OctetString
|
||||
|
||||
|
||||
class SetOfKeyPurposeId(SetOf):
|
||||
_child_spec = KeyPurposeId
|
||||
|
||||
|
||||
class Attribute(Sequence):
|
||||
_fields = [
|
||||
('type', AttributeType),
|
||||
('values', None),
|
||||
]
|
||||
|
||||
_oid_specs = {
|
||||
'friendly_name': SetOfBMPString,
|
||||
'local_key_id': SetOfOctetString,
|
||||
'microsoft_csp_name': SetOfBMPString,
|
||||
'trusted_key_usage': SetOfKeyPurposeId,
|
||||
}
|
||||
|
||||
def _values_spec(self):
|
||||
return self._oid_specs.get(self['type'].native, SetOfAny)
|
||||
|
||||
_spec_callbacks = {
|
||||
'values': _values_spec
|
||||
}
|
||||
|
||||
|
||||
class Attributes(SetOf):
|
||||
_child_spec = Attribute
|
||||
|
||||
|
||||
class Pfx(Sequence):
    """
    The top-level PKCS#12 structure (RFC 7292, section 4).
    """

    _fields = [
        ('version', Version),
        ('auth_safe', ContentInfo),
        ('mac_data', MacData, {'optional': True})
    ]

    # Per-instance cache for the lazily-parsed authenticated_safe property
    _authenticated_safe = None

    @property
    def authenticated_safe(self):
        # Parses the AuthenticatedSafe out of the auth_safe ContentInfo,
        # unwrapping one SignedData layer when present (public-key integrity
        # mode), and caches the parsed result on the instance
        if self._authenticated_safe is None:
            content = self['auth_safe']['content']
            if isinstance(content, SignedData):
                content = content['content_info']['content']
            self._authenticated_safe = AuthenticatedSafe.load(content.native)
        return self._authenticated_safe
|
||||
|
||||
|
||||
class AuthenticatedSafe(SequenceOf):
|
||||
_child_spec = ContentInfo
|
||||
|
||||
|
||||
class BagId(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.2.840.113549.1.12.10.1.1': 'key_bag',
|
||||
'1.2.840.113549.1.12.10.1.2': 'pkcs8_shrouded_key_bag',
|
||||
'1.2.840.113549.1.12.10.1.3': 'cert_bag',
|
||||
'1.2.840.113549.1.12.10.1.4': 'crl_bag',
|
||||
'1.2.840.113549.1.12.10.1.5': 'secret_bag',
|
||||
'1.2.840.113549.1.12.10.1.6': 'safe_contents',
|
||||
}
|
||||
|
||||
|
||||
class CertId(ObjectIdentifier):
|
||||
_map = {
|
||||
'1.2.840.113549.1.9.22.1': 'x509',
|
||||
'1.2.840.113549.1.9.22.2': 'sdsi',
|
||||
}
|
||||
|
||||
|
||||
class CertBag(Sequence):
|
||||
_fields = [
|
||||
('cert_id', CertId),
|
||||
('cert_value', ParsableOctetString, {'explicit': 0}),
|
||||
]
|
||||
|
||||
_oid_pair = ('cert_id', 'cert_value')
|
||||
_oid_specs = {
|
||||
'x509': Certificate,
|
||||
}
|
||||
|
||||
|
||||
class CrlBag(Sequence):
|
||||
_fields = [
|
||||
('crl_id', ObjectIdentifier),
|
||||
('crl_value', OctetString, {'explicit': 0}),
|
||||
]
|
||||
|
||||
|
||||
class SecretBag(Sequence):
|
||||
_fields = [
|
||||
('secret_type_id', ObjectIdentifier),
|
||||
('secret_value', OctetString, {'explicit': 0}),
|
||||
]
|
||||
|
||||
|
||||
class SafeContents(SequenceOf):
|
||||
pass
|
||||
|
||||
|
||||
class SafeBag(Sequence):
|
||||
_fields = [
|
||||
('bag_id', BagId),
|
||||
('bag_value', Any, {'explicit': 0}),
|
||||
('bag_attributes', Attributes, {'optional': True}),
|
||||
]
|
||||
|
||||
_oid_pair = ('bag_id', 'bag_value')
|
||||
_oid_specs = {
|
||||
'key_bag': PrivateKeyInfo,
|
||||
'pkcs8_shrouded_key_bag': EncryptedPrivateKeyInfo,
|
||||
'cert_bag': CertBag,
|
||||
'crl_bag': CrlBag,
|
||||
'secret_bag': SecretBag,
|
||||
'safe_contents': SafeContents
|
||||
}
|
||||
|
||||
|
||||
SafeContents._child_spec = SafeBag
|
@ -1,310 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
ASN.1 type classes for the time stamp protocol (TSP). Exports the following
|
||||
items:
|
||||
|
||||
- TimeStampReq()
|
||||
- TimeStampResp()
|
||||
|
||||
Also adds TimeStampedData() support to asn1crypto.cms.ContentInfo(),
|
||||
TimeStampedData() and TSTInfo() support to
|
||||
asn1crypto.cms.EncapsulatedContentInfo() and some oids and value parsers to
|
||||
asn1crypto.cms.CMSAttribute().
|
||||
|
||||
Other type classes are defined that help compose the types listed above.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
from .algos import DigestAlgorithm
|
||||
from .cms import (
|
||||
CMSAttribute,
|
||||
CMSAttributeType,
|
||||
ContentInfo,
|
||||
ContentType,
|
||||
EncapsulatedContentInfo,
|
||||
)
|
||||
from .core import (
|
||||
Any,
|
||||
BitString,
|
||||
Boolean,
|
||||
Choice,
|
||||
GeneralizedTime,
|
||||
IA5String,
|
||||
Integer,
|
||||
ObjectIdentifier,
|
||||
OctetString,
|
||||
Sequence,
|
||||
SequenceOf,
|
||||
SetOf,
|
||||
UTF8String,
|
||||
)
|
||||
from .crl import CertificateList
|
||||
from .x509 import (
|
||||
Attributes,
|
||||
CertificatePolicies,
|
||||
GeneralName,
|
||||
GeneralNames,
|
||||
)
|
||||
|
||||
|
||||
# The structures in this file are based on https://tools.ietf.org/html/rfc3161,
|
||||
# https://tools.ietf.org/html/rfc4998, https://tools.ietf.org/html/rfc5544,
|
||||
# https://tools.ietf.org/html/rfc5035, https://tools.ietf.org/html/rfc2634
|
||||
|
||||
class Version(Integer):
|
||||
_map = {
|
||||
0: 'v0',
|
||||
1: 'v1',
|
||||
2: 'v2',
|
||||
3: 'v3',
|
||||
4: 'v4',
|
||||
5: 'v5',
|
||||
}
|
||||
|
||||
|
||||
class MessageImprint(Sequence):
|
||||
_fields = [
|
||||
('hash_algorithm', DigestAlgorithm),
|
||||
('hashed_message', OctetString),
|
||||
]
|
||||
|
||||
|
||||
class Accuracy(Sequence):
|
||||
_fields = [
|
||||
('seconds', Integer, {'optional': True}),
|
||||
('millis', Integer, {'implicit': 0, 'optional': True}),
|
||||
('micros', Integer, {'implicit': 1, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class Extension(Sequence):
|
||||
_fields = [
|
||||
('extn_id', ObjectIdentifier),
|
||||
('critical', Boolean, {'default': False}),
|
||||
('extn_value', OctetString),
|
||||
]
|
||||
|
||||
|
||||
class Extensions(SequenceOf):
|
||||
_child_spec = Extension
|
||||
|
||||
|
||||
class TSTInfo(Sequence):
|
||||
_fields = [
|
||||
('version', Version),
|
||||
('policy', ObjectIdentifier),
|
||||
('message_imprint', MessageImprint),
|
||||
('serial_number', Integer),
|
||||
('gen_time', GeneralizedTime),
|
||||
('accuracy', Accuracy, {'optional': True}),
|
||||
('ordering', Boolean, {'default': False}),
|
||||
('nonce', Integer, {'optional': True}),
|
||||
('tsa', GeneralName, {'explicit': 0, 'optional': True}),
|
||||
('extensions', Extensions, {'implicit': 1, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class TimeStampReq(Sequence):
|
||||
_fields = [
|
||||
('version', Version),
|
||||
('message_imprint', MessageImprint),
|
||||
('req_policy', ObjectIdentifier, {'optional': True}),
|
||||
('nonce', Integer, {'optional': True}),
|
||||
('cert_req', Boolean, {'default': False}),
|
||||
('extensions', Extensions, {'implicit': 0, 'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class PKIStatus(Integer):
|
||||
_map = {
|
||||
0: 'granted',
|
||||
1: 'granted_with_mods',
|
||||
2: 'rejection',
|
||||
3: 'waiting',
|
||||
4: 'revocation_warning',
|
||||
5: 'revocation_notification',
|
||||
}
|
||||
|
||||
|
||||
class PKIFreeText(SequenceOf):
|
||||
_child_spec = UTF8String
|
||||
|
||||
|
||||
class PKIFailureInfo(BitString):
|
||||
_map = {
|
||||
0: 'bad_alg',
|
||||
2: 'bad_request',
|
||||
5: 'bad_data_format',
|
||||
14: 'time_not_available',
|
||||
15: 'unaccepted_policy',
|
||||
16: 'unaccepted_extensions',
|
||||
17: 'add_info_not_available',
|
||||
25: 'system_failure',
|
||||
}
|
||||
|
||||
|
||||
class PKIStatusInfo(Sequence):
|
||||
_fields = [
|
||||
('status', PKIStatus),
|
||||
('status_string', PKIFreeText, {'optional': True}),
|
||||
('fail_info', PKIFailureInfo, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class TimeStampResp(Sequence):
|
||||
_fields = [
|
||||
('status', PKIStatusInfo),
|
||||
('time_stamp_token', ContentInfo),
|
||||
]
|
||||
|
||||
|
||||
class MetaData(Sequence):
|
||||
_fields = [
|
||||
('hash_protected', Boolean),
|
||||
('file_name', UTF8String, {'optional': True}),
|
||||
('media_type', IA5String, {'optional': True}),
|
||||
('other_meta_data', Attributes, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class TimeStampAndCRL(Sequence):
|
||||
_fields = [
|
||||
('time_stamp', EncapsulatedContentInfo),
|
||||
('crl', CertificateList, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class TimeStampTokenEvidence(SequenceOf):
|
||||
_child_spec = TimeStampAndCRL
|
||||
|
||||
|
||||
class DigestAlgorithms(SequenceOf):
|
||||
_child_spec = DigestAlgorithm
|
||||
|
||||
|
||||
class EncryptionInfo(Sequence):
|
||||
_fields = [
|
||||
('encryption_info_type', ObjectIdentifier),
|
||||
('encryption_info_value', Any),
|
||||
]
|
||||
|
||||
|
||||
class PartialHashtree(SequenceOf):
|
||||
_child_spec = OctetString
|
||||
|
||||
|
||||
class PartialHashtrees(SequenceOf):
|
||||
_child_spec = PartialHashtree
|
||||
|
||||
|
||||
class ArchiveTimeStamp(Sequence):
|
||||
_fields = [
|
||||
('digest_algorithm', DigestAlgorithm, {'implicit': 0, 'optional': True}),
|
||||
('attributes', Attributes, {'implicit': 1, 'optional': True}),
|
||||
('reduced_hashtree', PartialHashtrees, {'implicit': 2, 'optional': True}),
|
||||
('time_stamp', ContentInfo),
|
||||
]
|
||||
|
||||
|
||||
class ArchiveTimeStampSequence(SequenceOf):
|
||||
_child_spec = ArchiveTimeStamp
|
||||
|
||||
|
||||
class EvidenceRecord(Sequence):
|
||||
_fields = [
|
||||
('version', Version),
|
||||
('digest_algorithms', DigestAlgorithms),
|
||||
('crypto_infos', Attributes, {'implicit': 0, 'optional': True}),
|
||||
('encryption_info', EncryptionInfo, {'implicit': 1, 'optional': True}),
|
||||
('archive_time_stamp_sequence', ArchiveTimeStampSequence),
|
||||
]
|
||||
|
||||
|
||||
class OtherEvidence(Sequence):
|
||||
_fields = [
|
||||
('oe_type', ObjectIdentifier),
|
||||
('oe_value', Any),
|
||||
]
|
||||
|
||||
|
||||
class Evidence(Choice):
|
||||
_alternatives = [
|
||||
('tst_evidence', TimeStampTokenEvidence, {'implicit': 0}),
|
||||
('ers_evidence', EvidenceRecord, {'implicit': 1}),
|
||||
('other_evidence', OtherEvidence, {'implicit': 2}),
|
||||
]
|
||||
|
||||
|
||||
class TimeStampedData(Sequence):
|
||||
_fields = [
|
||||
('version', Version),
|
||||
('data_uri', IA5String, {'optional': True}),
|
||||
('meta_data', MetaData, {'optional': True}),
|
||||
('content', OctetString, {'optional': True}),
|
||||
('temporal_evidence', Evidence),
|
||||
]
|
||||
|
||||
|
||||
class IssuerSerial(Sequence):
|
||||
_fields = [
|
||||
('issuer', GeneralNames),
|
||||
('serial_number', Integer),
|
||||
]
|
||||
|
||||
|
||||
class ESSCertID(Sequence):
|
||||
_fields = [
|
||||
('cert_hash', OctetString),
|
||||
('issuer_serial', IssuerSerial, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class ESSCertIDs(SequenceOf):
|
||||
_child_spec = ESSCertID
|
||||
|
||||
|
||||
class SigningCertificate(Sequence):
|
||||
_fields = [
|
||||
('certs', ESSCertIDs),
|
||||
('policies', CertificatePolicies, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfSigningCertificates(SetOf):
|
||||
_child_spec = SigningCertificate
|
||||
|
||||
|
||||
class ESSCertIDv2(Sequence):
|
||||
_fields = [
|
||||
('hash_algorithm', DigestAlgorithm, {'default': {'algorithm': 'sha256'}}),
|
||||
('cert_hash', OctetString),
|
||||
('issuer_serial', IssuerSerial, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class ESSCertIDv2s(SequenceOf):
|
||||
_child_spec = ESSCertIDv2
|
||||
|
||||
|
||||
class SigningCertificateV2(Sequence):
|
||||
_fields = [
|
||||
('certs', ESSCertIDv2s),
|
||||
('policies', CertificatePolicies, {'optional': True}),
|
||||
]
|
||||
|
||||
|
||||
class SetOfSigningCertificatesV2(SetOf):
|
||||
_child_spec = SigningCertificateV2
|
||||
|
||||
|
||||
EncapsulatedContentInfo._oid_specs['tst_info'] = TSTInfo
|
||||
EncapsulatedContentInfo._oid_specs['timestamped_data'] = TimeStampedData
|
||||
ContentInfo._oid_specs['timestamped_data'] = TimeStampedData
|
||||
ContentType._map['1.2.840.113549.1.9.16.1.4'] = 'tst_info'
|
||||
ContentType._map['1.2.840.113549.1.9.16.1.31'] = 'timestamped_data'
|
||||
CMSAttributeType._map['1.2.840.113549.1.9.16.2.12'] = 'signing_certificate'
|
||||
CMSAttribute._oid_specs['signing_certificate'] = SetOfSigningCertificates
|
||||
CMSAttributeType._map['1.2.840.113549.1.9.16.2.47'] = 'signing_certificate_v2'
|
||||
CMSAttribute._oid_specs['signing_certificate_v2'] = SetOfSigningCertificatesV2
|
@ -1,878 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Miscellaneous data helpers, including functions for converting integers to and
|
||||
from bytes and UTC timezone. Exports the following items:
|
||||
|
||||
- OrderedDict()
|
||||
- int_from_bytes()
|
||||
- int_to_bytes()
|
||||
- timezone.utc
|
||||
- utc_with_dst
|
||||
- create_timezone()
|
||||
- inet_ntop()
|
||||
- inet_pton()
|
||||
- uri_to_iri()
|
||||
- iri_to_uri()
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import math
|
||||
import sys
|
||||
from datetime import datetime, date, timedelta, tzinfo
|
||||
|
||||
from ._errors import unwrap
|
||||
from ._iri import iri_to_uri, uri_to_iri # noqa
|
||||
from ._ordereddict import OrderedDict # noqa
|
||||
from ._types import type_name
|
||||
|
||||
if sys.platform == 'win32':
|
||||
from ._inet import inet_ntop, inet_pton
|
||||
else:
|
||||
from socket import inet_ntop, inet_pton # noqa
|
||||
|
||||
|
||||
# Python 2
|
||||
if sys.version_info <= (3,):
|
||||
|
||||
def int_to_bytes(value, signed=False, width=None):
|
||||
"""
|
||||
Converts an integer to a byte string
|
||||
|
||||
:param value:
|
||||
The integer to convert
|
||||
|
||||
:param signed:
|
||||
If the byte string should be encoded using two's complement
|
||||
|
||||
:param width:
|
||||
If None, the minimal possible size (but at least 1),
|
||||
otherwise an integer of the byte width for the return value
|
||||
|
||||
:return:
|
||||
A byte string
|
||||
"""
|
||||
|
||||
if value == 0 and width == 0:
|
||||
return b''
|
||||
|
||||
# Handle negatives in two's complement
|
||||
is_neg = False
|
||||
if signed and value < 0:
|
||||
is_neg = True
|
||||
bits = int(math.ceil(len('%x' % abs(value)) / 2.0) * 8)
|
||||
value = (value + (1 << bits)) % (1 << bits)
|
||||
|
||||
hex_str = '%x' % value
|
||||
if len(hex_str) & 1:
|
||||
hex_str = '0' + hex_str
|
||||
|
||||
output = hex_str.decode('hex')
|
||||
|
||||
if signed and not is_neg and ord(output[0:1]) & 0x80:
|
||||
output = b'\x00' + output
|
||||
|
||||
if width is not None:
|
||||
if len(output) > width:
|
||||
raise OverflowError('int too big to convert')
|
||||
if is_neg:
|
||||
pad_char = b'\xFF'
|
||||
else:
|
||||
pad_char = b'\x00'
|
||||
output = (pad_char * (width - len(output))) + output
|
||||
elif is_neg and ord(output[0:1]) & 0x80 == 0:
|
||||
output = b'\xFF' + output
|
||||
|
||||
return output
|
||||
|
||||
def int_from_bytes(value, signed=False):
|
||||
"""
|
||||
Converts a byte string to an integer
|
||||
|
||||
:param value:
|
||||
The byte string to convert
|
||||
|
||||
:param signed:
|
||||
If the byte string should be interpreted using two's complement
|
||||
|
||||
:return:
|
||||
An integer
|
||||
"""
|
||||
|
||||
if value == b'':
|
||||
return 0
|
||||
|
||||
num = long(value.encode("hex"), 16) # noqa
|
||||
|
||||
if not signed:
|
||||
return num
|
||||
|
||||
# Check for sign bit and handle two's complement
|
||||
if ord(value[0:1]) & 0x80:
|
||||
bit_len = len(value) * 8
|
||||
return num - (1 << bit_len)
|
||||
|
||||
return num
|
||||
|
||||
class timezone(tzinfo): # noqa
|
||||
"""
|
||||
Implements datetime.timezone for py2.
|
||||
Only full minute offsets are supported.
|
||||
DST is not supported.
|
||||
"""
|
||||
|
||||
def __init__(self, offset, name=None):
|
||||
"""
|
||||
:param offset:
|
||||
A timedelta with this timezone's offset from UTC
|
||||
|
||||
:param name:
|
||||
Name of the timezone; if None, generate one.
|
||||
"""
|
||||
|
||||
if not timedelta(hours=-24) < offset < timedelta(hours=24):
|
||||
raise ValueError('Offset must be in [-23:59, 23:59]')
|
||||
|
||||
if offset.seconds % 60 or offset.microseconds:
|
||||
raise ValueError('Offset must be full minutes')
|
||||
|
||||
self._offset = offset
|
||||
|
||||
if name is not None:
|
||||
self._name = name
|
||||
elif not offset:
|
||||
self._name = 'UTC'
|
||||
else:
|
||||
self._name = 'UTC' + _format_offset(offset)
|
||||
|
||||
def __eq__(self, other):
|
||||
"""
|
||||
Compare two timezones
|
||||
|
||||
:param other:
|
||||
The other timezone to compare to
|
||||
|
||||
:return:
|
||||
A boolean
|
||||
"""
|
||||
|
||||
if type(other) != timezone:
|
||||
return False
|
||||
return self._offset == other._offset
|
||||
|
||||
def __getinitargs__(self):
|
||||
"""
|
||||
Called by tzinfo.__reduce__ to support pickle and copy.
|
||||
|
||||
:return:
|
||||
offset and name, to be used for __init__
|
||||
"""
|
||||
|
||||
return self._offset, self._name
|
||||
|
||||
def tzname(self, dt):
|
||||
"""
|
||||
:param dt:
|
||||
A datetime object; ignored.
|
||||
|
||||
:return:
|
||||
Name of this timezone
|
||||
"""
|
||||
|
||||
return self._name
|
||||
|
||||
def utcoffset(self, dt):
|
||||
"""
|
||||
:param dt:
|
||||
A datetime object; ignored.
|
||||
|
||||
:return:
|
||||
A timedelta object with the offset from UTC
|
||||
"""
|
||||
|
||||
return self._offset
|
||||
|
||||
def dst(self, dt):
|
||||
"""
|
||||
:param dt:
|
||||
A datetime object; ignored.
|
||||
|
||||
:return:
|
||||
Zero timedelta
|
||||
"""
|
||||
|
||||
return timedelta(0)
|
||||
|
||||
timezone.utc = timezone(timedelta(0))
|
||||
|
||||
# Python 3
|
||||
else:
|
||||
|
||||
from datetime import timezone # noqa
|
||||
|
||||
def int_to_bytes(value, signed=False, width=None):
    """
    Converts an integer to a byte string

    :param value:
        The integer to convert

    :param signed:
        If the byte string should be encoded using two's complement

    :param width:
        If None, the minimal possible size (but at least 1),
        otherwise an integer of the byte width for the return value

    :return:
        A byte string
    """

    if width is None:
        if signed:
            if value < 0:
                bits_needed = abs(value + 1).bit_length()
            else:
                bits_needed = value.bit_length()
            # Reserve room for a sign bit when the magnitude exactly fills
            # a whole number of bytes
            if bits_needed % 8 == 0:
                bits_needed += 1
        else:
            bits_needed = value.bit_length()
        # Round bits up to whole bytes, always producing at least one byte
        width = max(1, -(-bits_needed // 8))
    return value.to_bytes(width, byteorder='big', signed=signed)
|
||||
|
||||
def int_from_bytes(value, signed=False):
    """
    Converts a byte string to an integer

    :param value:
        The byte string to convert

    :param signed:
        If the byte string should be interpreted using two's complement

    :return:
        An integer
    """

    # Big-endian interpretation; an empty byte string yields 0
    return int.from_bytes(value, byteorder='big', signed=signed)
|
||||
|
||||
|
||||
def _format_offset(off):
|
||||
"""
|
||||
Format a timedelta into "[+-]HH:MM" format or "" for None
|
||||
"""
|
||||
|
||||
if off is None:
|
||||
return ''
|
||||
mins = off.days * 24 * 60 + off.seconds // 60
|
||||
sign = '-' if mins < 0 else '+'
|
||||
return sign + '%02d:%02d' % divmod(abs(mins), 60)
|
||||
|
||||
|
||||
class _UtcWithDst(tzinfo):
|
||||
"""
|
||||
Utc class where dst does not return None; required for astimezone
|
||||
"""
|
||||
|
||||
def tzname(self, dt):
|
||||
return 'UTC'
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return timedelta(0)
|
||||
|
||||
def dst(self, dt):
|
||||
return timedelta(0)
|
||||
|
||||
|
||||
utc_with_dst = _UtcWithDst()
|
||||
|
||||
_timezone_cache = {}
|
||||
|
||||
|
||||
def create_timezone(offset):
    """
    Returns a new datetime.timezone object with the given offset.
    Uses cached objects if possible.

    :param offset:
        A datetime.timedelta object; It needs to be in full minutes and between -23:59 and +23:59.

    :return:
        A datetime.timezone object
    """

    try:
        # EAFP: hit the module-level cache first; timedelta is hashable
        tz = _timezone_cache[offset]
    except KeyError:
        tz = _timezone_cache[offset] = timezone(offset)
    return tz
class extended_date(object):
    """
    A datetime.datetime-like object that represents the year 0. This is just
    to handle 0000-01-01 found in some certificates. Python's datetime does
    not support year 0.

    The proleptic gregorian calendar repeats itself every 400 years. Therefore,
    the simplest way to format is to substitute year 2000.
    """

    def __init__(self, year, month, day):
        """
        :param year:
            The integer 0

        :param month:
            An integer from 1 to 12

        :param day:
            An integer from 1 to 31

        :raises ValueError:
            When year is not 0, or month/day are out of range
        """

        if year != 0:
            raise ValueError('year must be 0')

        # Store the date with year 2000 substituted; the proleptic Gregorian
        # calendar repeats every 400 years, so weekday/leap behavior matches
        self._y2k = date(2000, month, day)

    @property
    def year(self):
        """
        :return:
            The integer 0
        """

        return 0

    @property
    def month(self):
        """
        :return:
            An integer from 1 to 12
        """

        return self._y2k.month

    @property
    def day(self):
        """
        :return:
            An integer from 1 to 31
        """

        return self._y2k.day

    def strftime(self, format):
        """
        Formats the date using strftime()

        :param format:
            A strftime() format string

        :return:
            A str, the formatted date as a unicode string
            in Python 3 and a byte string in Python 2
        """

        # Format the date twice, once with year 2000, once with year 4000.
        # The only differences in the result will be in the millennium. Find them and replace by zeros.
        y2k = self._y2k.strftime(format)
        y4k = self._y2k.replace(year=4000).strftime(format)
        return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))

    def isoformat(self):
        """
        Formats the date as %Y-%m-%d

        :return:
            The date formatted to %Y-%m-%d as a unicode string in Python 3
            and a byte string in Python 2
        """

        return self.strftime('0000-%m-%d')

    def replace(self, year=None, month=None, day=None):
        """
        Returns a new datetime.date or asn1crypto.util.extended_date
        object with the specified components replaced

        :return:
            A datetime.date or asn1crypto.util.extended_date object
        """

        if year is None:
            year = self.year
        if month is None:
            month = self.month
        if day is None:
            day = self.day

        # Any year other than 0 can be represented by a plain datetime.date
        if year > 0:
            cls = date
        else:
            cls = extended_date

        return cls(
            year,
            month,
            day
        )

    def __str__(self):
        """
        :return:
            A str representing this extended_date, e.g. "0000-01-01"
        """

        return self.strftime('%Y-%m-%d')

    def __eq__(self, other):
        """
        Compare two extended_date objects

        :param other:
            The other extended_date to compare to

        :return:
            A boolean
        """

        # datetime.date object wouldn't compare equal because it can't be year 0
        if not isinstance(other, self.__class__):
            return False
        return self.__cmp__(other) == 0

    def __ne__(self, other):
        """
        Compare two extended_date objects

        :param other:
            The other extended_date to compare to

        :return:
            A boolean
        """

        return not self.__eq__(other)

    def _comparison_error(self, other):
        # Raises a TypeError naming the unsupported type that was compared
        raise TypeError(unwrap(
            '''
            An asn1crypto.util.extended_date object can only be compared to
            an asn1crypto.util.extended_date or datetime.date object, not %s
            ''',
            type_name(other)
        ))

    def __cmp__(self, other):
        """
        Compare two extended_date or datetime.date objects

        :param other:
            The other extended_date object to compare to

        :return:
            An integer smaller than, equal to, or larger than 0
        """

        # self is year 0, other is >= year 1
        if isinstance(other, date):
            return -1

        if not isinstance(other, self.__class__):
            self._comparison_error(other)

        if self._y2k < other._y2k:
            return -1
        if self._y2k > other._y2k:
            return 1
        return 0

    def __lt__(self, other):
        return self.__cmp__(other) < 0

    def __le__(self, other):
        return self.__cmp__(other) <= 0

    def __gt__(self, other):
        return self.__cmp__(other) > 0

    def __ge__(self, other):
        return self.__cmp__(other) >= 0
class extended_datetime(object):
    """
    A datetime.datetime-like object that represents the year 0. This is just
    to handle 0000-01-01 found in some certificates. Python's datetime does
    not support year 0.

    The proleptic gregorian calendar repeats itself every 400 years. Therefore,
    the simplest way to format is to substitute year 2000.
    """

    # There are 97 leap days during 400 years.
    DAYS_IN_400_YEARS = 400 * 365 + 97
    DAYS_IN_2000_YEARS = 5 * DAYS_IN_400_YEARS

    def __init__(self, year, *args, **kwargs):
        """
        :param year:
            The integer 0

        :param args:
            Other positional arguments; see datetime.datetime.

        :param kwargs:
            Other keyword arguments; see datetime.datetime.

        :raises ValueError:
            When year is not 0
        """

        if year != 0:
            raise ValueError('year must be 0')

        # Store the moment with year 2000 substituted; the proleptic
        # Gregorian calendar repeats every 400 years
        self._y2k = datetime(2000, *args, **kwargs)

    @property
    def year(self):
        """
        :return:
            The integer 0
        """

        return 0

    @property
    def month(self):
        """
        :return:
            An integer from 1 to 12
        """

        return self._y2k.month

    @property
    def day(self):
        """
        :return:
            An integer from 1 to 31
        """

        return self._y2k.day

    @property
    def hour(self):
        """
        :return:
            An integer from 1 to 24
        """

        return self._y2k.hour

    @property
    def minute(self):
        """
        :return:
            An integer from 1 to 60
        """

        return self._y2k.minute

    @property
    def second(self):
        """
        :return:
            An integer from 1 to 60
        """

        return self._y2k.second

    @property
    def microsecond(self):
        """
        :return:
            An integer from 0 to 999999
        """

        return self._y2k.microsecond

    @property
    def tzinfo(self):
        """
        :return:
            If object is timezone aware, a datetime.tzinfo object, else None.
        """

        return self._y2k.tzinfo

    def utcoffset(self):
        """
        :return:
            If object is timezone aware, a datetime.timedelta object, else None.
        """

        return self._y2k.utcoffset()

    def time(self):
        """
        :return:
            A datetime.time object
        """

        return self._y2k.time()

    def date(self):
        """
        :return:
            An asn1crypto.util.extended_date of the date
        """

        return extended_date(0, self.month, self.day)

    def strftime(self, format):
        """
        Performs strftime(), always returning a str

        :param format:
            A strftime() format string

        :return:
            A str of the formatted datetime
        """

        # Format the datetime twice, once with year 2000, once with year 4000.
        # The only differences in the result will be in the millennium. Find them and replace by zeros.
        y2k = self._y2k.strftime(format)
        y4k = self._y2k.replace(year=4000).strftime(format)
        return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))

    def isoformat(self, sep='T'):
        """
        Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
        date and time portions

        :param sep:
            A single character of the separator to place between the date and
            time

        :return:
            The formatted datetime as a unicode string in Python 3 and a byte
            string in Python 2
        """

        s = '0000-%02d-%02d%c%02d:%02d:%02d' % (self.month, self.day, sep, self.hour, self.minute, self.second)
        if self.microsecond:
            s += '.%06d' % self.microsecond
        return s + _format_offset(self.utcoffset())

    def replace(self, year=None, *args, **kwargs):
        """
        Returns a new datetime.datetime or asn1crypto.util.extended_datetime
        object with the specified components replaced

        :param year:
            The new year to substitute. None to keep it.

        :param args:
            Other positional arguments; see datetime.datetime.replace.

        :param kwargs:
            Other keyword arguments; see datetime.datetime.replace.

        :return:
            A datetime.datetime or asn1crypto.util.extended_datetime object
        """

        if year:
            # A non-zero year can be represented by a plain datetime.datetime
            return self._y2k.replace(year, *args, **kwargs)

        return extended_datetime.from_y2k(self._y2k.replace(2000, *args, **kwargs))

    def astimezone(self, tz):
        """
        Convert this extended_datetime to another timezone.

        :param tz:
            A datetime.tzinfo object.

        :return:
            A new extended_datetime or datetime.datetime object
        """

        return extended_datetime.from_y2k(self._y2k.astimezone(tz))

    def timestamp(self):
        """
        Return POSIX timestamp. Only supported in python >= 3.3

        :return:
            A float representing the seconds since 1970-01-01 UTC. This will be a negative value.
        """

        # Shift the year-2000 timestamp back by exactly 2000 proleptic
        # Gregorian years worth of seconds
        return self._y2k.timestamp() - self.DAYS_IN_2000_YEARS * 86400

    def __str__(self):
        """
        :return:
            A str representing this extended_datetime, e.g. "0000-01-01 00:00:00.000001-10:00"
        """

        return self.isoformat(sep=' ')

    def __eq__(self, other):
        """
        Compare two extended_datetime objects

        :param other:
            The other extended_datetime to compare to

        :return:
            A boolean
        """

        # Only compare against other datetime or extended_datetime objects
        if not isinstance(other, (self.__class__, datetime)):
            return False

        # Offset-naive and offset-aware datetimes are never the same
        if (self.tzinfo is None) != (other.tzinfo is None):
            return False

        return self.__cmp__(other) == 0

    def __ne__(self, other):
        """
        Compare two extended_datetime objects

        :param other:
            The other extended_datetime to compare to

        :return:
            A boolean
        """

        return not self.__eq__(other)

    def _comparison_error(self, other):
        """
        Raises a TypeError about the other object not being suitable for
        comparison

        :param other:
            The object being compared to
        """

        raise TypeError(unwrap(
            '''
            An asn1crypto.util.extended_datetime object can only be compared to
            an asn1crypto.util.extended_datetime or datetime.datetime object,
            not %s
            ''',
            type_name(other)
        ))

    def __cmp__(self, other):
        """
        Compare two extended_datetime or datetime.datetime objects

        :param other:
            The other extended_datetime or datetime.datetime object to compare to

        :return:
            An integer smaller than, equal to, or larger than 0
        """

        if not isinstance(other, (self.__class__, datetime)):
            self._comparison_error(other)

        if (self.tzinfo is None) != (other.tzinfo is None):
            raise TypeError("can't compare offset-naive and offset-aware datetimes")

        # Delegate to subtraction, which handles the year-0 offset
        diff = self - other
        zero = timedelta(0)
        if diff < zero:
            return -1
        if diff > zero:
            return 1
        return 0

    def __lt__(self, other):
        return self.__cmp__(other) < 0

    def __le__(self, other):
        return self.__cmp__(other) <= 0

    def __gt__(self, other):
        return self.__cmp__(other) > 0

    def __ge__(self, other):
        return self.__cmp__(other) >= 0

    def __add__(self, other):
        """
        Adds a timedelta

        :param other:
            A datetime.timedelta object to add.

        :return:
            A new extended_datetime or datetime.datetime object.
        """

        return extended_datetime.from_y2k(self._y2k + other)

    def __sub__(self, other):
        """
        Subtracts a timedelta or another datetime.

        :param other:
            A datetime.timedelta or datetime.datetime or extended_datetime object to subtract.

        :return:
            If a timedelta is passed, a new extended_datetime or datetime.datetime object.
            Else a datetime.timedelta object.
        """

        if isinstance(other, timedelta):
            return extended_datetime.from_y2k(self._y2k - other)

        if isinstance(other, extended_datetime):
            # Both operands carry the same 2000-year shift, so it cancels out
            return self._y2k - other._y2k

        if isinstance(other, datetime):
            # Only self is shifted by 2000 years; compensate for it
            return self._y2k - other - timedelta(days=self.DAYS_IN_2000_YEARS)

        return NotImplemented

    def __rsub__(self, other):
        return -(self - other)

    @classmethod
    def from_y2k(cls, value):
        """
        Revert substitution of year 2000.

        :param value:
            A datetime.datetime object which is 2000 years in the future.
        :return:
            A new extended_datetime or datetime.datetime object.
        """

        year = value.year - 2000

        # Only year 0 requires the extended_datetime workaround
        if year > 0:
            new_cls = datetime
        else:
            new_cls = cls

        return new_cls(
            year,
            value.month,
            value.day,
            value.hour,
            value.minute,
            value.second,
            value.microsecond,
            value.tzinfo
        )
@ -1,6 +0,0 @@
|
||||
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function


# Package version, exposed as both a display string and a comparable tuple
__version__ = '1.5.1'
__version_info__ = (1, 5, 1)
File diff suppressed because it is too large
Load Diff
@ -1,496 +0,0 @@
|
||||
# changelog
|
||||
|
||||
## 1.5.1
|
||||
|
||||
- Handle RSASSA-PSS in `keys.PrivateKeyInfo.bit_size` and
|
||||
`keys.PublicKeyInfo.bit_size`
|
||||
- Handle RSASSA-PSS in `keys.PrivateKeyInfo.wrap` and
|
||||
`keys.PublicKeyInfo.wrap`
|
||||
- Updated docs for `keys.PrivateKeyInfo.algorithm` and
|
||||
`keys.PublicKeyInfo.algorithm` to reflect that they can return
|
||||
`"rsassa_pss"`
|
||||
|
||||
## 1.5.0
|
||||
|
||||
- Fix `tsp.TimeStampAndCRL` to be a `core.Sequence` instead of a
|
||||
`core.SequenceOf` *via @joernheissler*
|
||||
- Added OIDs for Edwards curves from RFC 8410 - via @MatthiasValvekens
|
||||
- Fixed convenience attributes on `algos.EncryptionAlgorithm` when the
|
||||
algorithm is RC2 *via @joernheissler*
|
||||
- Added Microsoft OIDs `microsoft_enrollment_csp_provider`
|
||||
(`1.3.6.1.4.1.311.13.2.2`), `microsoft_os_version`
|
||||
(`1.3.6.1.4.1.311.13.2.3`) and `microsoft_request_client_info`
|
||||
(`1.3.6.1.4.1.311.21.20`)
|
||||
to `csr.CSRAttributeType` along with supporting extension structures
|
||||
*via @qha*
|
||||
- Added Microsoft OID `microsoft_enroll_certtype` (`1.3.6.1.4.1.311.20.2`)
|
||||
to `x509.ExtensionId` *via @qha*
|
||||
- Fixed a few bugs with parsing indefinite-length encodings *via @davidben*
|
||||
- Added various bounds checks to parsing engine *via @davidben*
|
||||
- Fixed a bug with tags not always being minimally encoded *via @davidben*
|
||||
- Fixed `cms.RoleSyntax`, `cms.SecurityCategory` and `cms.AttCertIssuer` to
|
||||
have explicit instead of implicit tagging *via @MatthiasValvekens*
|
||||
- Fixed tagging of, and default value for fields in `cms.Clearance` *via
|
||||
@MatthiasValvekens*
|
||||
- Fixed calling `.dump(force=True)` when the value has undefined/unknown
|
||||
`core.Sequence` fields. Previously the value would be truncated, now
|
||||
the existing encoding is preserved.
|
||||
- Added sMIME capabilities (`1.2.840.113549.1.9.15`) support from RFC 2633
|
||||
to `cms.CMSAttribute` *via Hellzed*
|
||||
|
||||
## 1.4.0
|
||||
|
||||
- `core.ObjectIdentifier` and all derived classes now obey X.660 §7.6 and
|
||||
thus restrict the first arc to 0 to 2, and the second arc to less than
|
||||
40 if the first arc is 0 or 1. This also fixes parsing of OIDs where the
|
||||
first arc is 2 and the second arc is greater than 39.
|
||||
- Fixed `keys.PublicKeyInfo.bit_size` to return an int rather than a float
|
||||
on Python 3 when working with elliptic curve keys
|
||||
- Fixed the `asn1crypto-tests` sdist on PyPi to work properly to generate a
|
||||
.whl
|
||||
|
||||
## 1.3.0
|
||||
|
||||
- Added `encrypt_key_pref` (`1.2.840.113549.1.9.16.2.11`) to
|
||||
`cms.CMSAttributeType()`, along with related structures
|
||||
- Added Brainpool curves from RFC 5639 to `keys.NamedCurve()`
|
||||
- Fixed `x509.Certificate().subject_directory_attributes_value`
|
||||
- Fixed some incorrectly computed minimum elliptic curve primary key
|
||||
encoding sizes in `keys.NamedCurve()`
|
||||
- Fixed a `TypeError` when trying to call `.untag()` or `.copy()` on a
|
||||
`core.UTCTime()` or `core.GeneralizedTime()`, or a value containing one,
|
||||
when using Python 2
|
||||
|
||||
## 1.2.0
|
||||
|
||||
- Added `asn1crypto.load_order()`, which returns a `list` of unicode strings
|
||||
of the names of the fully-qualified module names for all of submodules of
|
||||
the package. The module names are listed in their dependency load order.
|
||||
This is primarily intended for the sake of implementing hot reloading.
|
||||
|
||||
## 1.1.0
|
||||
|
||||
- Added User ID (`0.9.2342.19200300.100.1.1`) to `x509.NameType()`
|
||||
- Added various EC named curves to `keys.NamedCurve()`
|
||||
|
||||
## 1.0.1
|
||||
|
||||
- Fix an absolute import in `keys` to a relative import
|
||||
|
||||
## 1.0.0
|
||||
|
||||
- Backwards Compatibility Breaks
|
||||
- `cms.KeyEncryptionAlgorithmId().native` now returns the value
|
||||
`"rsaes_pkcs1v15"` for OID `1.2.840.113549.1.1.1` instead of `"rsa"`
|
||||
- Removed functionality to calculate public key values from private key
|
||||
values. Alternatives have been added to oscrypto.
|
||||
- `keys.PrivateKeyInfo().unwrap()` is now
|
||||
`oscrypto.asymmetric.PrivateKey().unwrap()`
|
||||
- `keys.PrivateKeyInfo().public_key` is now
|
||||
`oscrypto.asymmetric.PrivateKey().public_key.unwrap()`
|
||||
- `keys.PrivateKeyInfo().public_key_info` is now
|
||||
`oscrypto.asymmetric.PrivateKey().public_key.asn1`
|
||||
- `keys.PrivateKeyInfo().fingerprint` is now
|
||||
`oscrypto.asymmetric.PrivateKey().fingerprint`
|
||||
- `keys.PublicKeyInfo().unwrap()` is now
|
||||
`oscrypto.asymmetric.PublicKey().unwrap()`
|
||||
- `keys.PublicKeyInfo().fingerprint` is now
|
||||
`oscrypto.asymmetric.PublicKey().fingerprint`
|
||||
- Enhancements
|
||||
- Significantly improved parsing of `core.UTCTime()` and
|
||||
`core.GeneralizedTime()` values that include timezones and fractional
|
||||
seconds
|
||||
- `util.timezone` has a more complete implementation
|
||||
- `core.Choice()` may now be constructed by a 2-element tuple or a 1-key
|
||||
dict
|
||||
- Added `x509.Certificate().not_valid_before` and
|
||||
`x509.Certificate().not_valid_after`
|
||||
- Added `core.BitString().unused_bits`
|
||||
- Added `keys.NamedCurve.register()` for non-mainstream curve OIDs
|
||||
- No longer try to load optional performance dependency, `libcrypto`,
|
||||
on Mac or Linux
|
||||
- `ocsp.CertStatus().native` will now return meaningful unicode string
|
||||
values when the status choice is `"good"` or `"unknown"`. Previously
|
||||
both returned `None` due to the way the structure was designed.
|
||||
- Add support for explicit RSA SSA PSS (`1.2.840.113549.1.1.10`) to
|
||||
`keys.PublicKeyInfo()` and `keys.PrivateKeyInfo()`
|
||||
- Added structures for nested SHA-256 Windows PE signatures to
|
||||
`cms.CMSAttribute()`
|
||||
- Added RC4 (`1.2.840.113549.3.4`) to `algos.EncryptionAlgorithmId()`
|
||||
- Added secp256k1 (`1.3.132.0.10`) to `keys.NamedCurve()`
|
||||
- Added SHA-3 and SHAKE OIDs to `algos.DigestAlgorithmId()` and
|
||||
`algos.HmacAlgorithmId()`
|
||||
- Added RSA ES OAEP (`1.2.840.113549.1.1.7`) to
|
||||
`cms.KeyEncryptionAlgorithmId()`
|
||||
- Add IKE Intermediate (`1.3.6.1.5.5.8.2.2`) to `x509.KeyPurposeId()`
|
||||
- `x509.EmailAddress()` and `x509.DNSName()` now handle invalidly-encoded
|
||||
values using tags for `core.PrintableString()` and `core.UTF8String()`
|
||||
- Add parameter structure from RFC 5084 for AES-CCM to
|
||||
`algos.EncryptionAlgorithm()`
|
||||
- Improved robustness of parsing broken `core.Sequence()` and
|
||||
`core.SequenceOf()` values
|
||||
- Bug Fixes
|
||||
- Fixed encoding of tag values over 30
|
||||
- `core.IntegerBitString()` and `core.IntegerOctetString()` now restrict
|
||||
values to non-negative integers since negative values are not
|
||||
implemented
|
||||
- When copying or dumping a BER-encoded indefinite-length value,
|
||||
automatically force re-encoding to DER. *To ensure all nested values are
|
||||
always DER-encoded, `.dump(True)` must be called.*
|
||||
- Fix `UnboundLocalError` when calling `x509.IPAddress().native` on an
|
||||
encoded value that has a length of zero
|
||||
- Fixed passing `class_` via unicode string name to `core.Asn1Value()`
|
||||
- Fixed a bug where EC private keys with leading null bytes would be
|
||||
encoded in `keys.ECPrivateKey()` more narrowly than RFC 5915 requires
|
||||
- Fixed some edge-case bugs in `util.int_to_bytes()`
|
||||
- `x509.URI()` now only normalizes values when comparing
|
||||
- Fixed BER-decoding of indefinite length `core.BitString()`
|
||||
- Fixed DER-encoding of empty `core.BitString()`
|
||||
- Fixed a missing return value for `core.Choice().parse()`
|
||||
- Fixed `core.Choice().contents` working when the chosen alternative is a
|
||||
`core.Choice()` also
|
||||
- Fixed parsing and encoding of nested `core.Choice()` objects
|
||||
- Fixed a bug causing `core.ObjectIdentifier().native` to sometimes not
|
||||
map the OID
|
||||
- Packaging
|
||||
- `wheel`, `sdist` and `bdist_egg` releases now all include LICENSE,
|
||||
`sdist` includes docs
|
||||
- Added `asn1crypto_tests` package to PyPi
|
||||
|
||||
## 0.24.0
|
||||
|
||||
- `x509.Certificate().self_signed` will no longer return `"yes"` under any
|
||||
circumstances. This helps prevent confusion since the library does not
|
||||
verify the signature. Instead a library like oscrypto should be used
|
||||
to confirm if a certificate is self-signed.
|
||||
- Added various OIDs to `x509.KeyPurposeId()`
|
||||
- Added `x509.Certificate().private_key_usage_period_value`
|
||||
- Added structures for parsing common subject directory attributes for
|
||||
X.509 certificates, including `x509.SubjectDirectoryAttribute()`
|
||||
- Added `algos.AnyAlgorithmIdentifier()` for situations where an
|
||||
algorithm identifier may contain a digest, signed digest or encryption
|
||||
algorithm OID
|
||||
- Fixed a bug with `x509.Certificate().subject_directory_attributes_value`
|
||||
not returning the correct value
|
||||
- Fixed a bug where explicitly-tagged fields in a `core.Sequence()` would
|
||||
not function properly when the field had a default value
|
||||
- Fixed a bug with type checking in `pem.armor()`
|
||||
|
||||
## 0.23.0
|
||||
|
||||
- Backwards compatibility break: the `tag_type`, `explicit_tag` and
|
||||
`explicit_class` attributes on `core.Asn1Value` no longer exist and were
|
||||
replaced by the `implicit` and `explicit` attributes. Field param dicts
|
||||
may use the new `explicit` and `implicit` keys, or the old `tag_type` and
|
||||
`tag` keys. The attribute changes will likely to have little to no impact
|
||||
since they were primarily an implementation detail.
|
||||
- Teletex strings used inside of X.509 certificates are now interpreted
|
||||
using Windows-1252 (a superset of ISO-8859-1). This enables compatibility
|
||||
with certificates generated by OpenSSL. Strict parsing of Teletex strings
|
||||
can be retained by using the `x509.strict_teletex()` context manager.
|
||||
- Added support for nested explicit tagging, supporting values that are
|
||||
defined with explicit tagging and then added as a field of another
|
||||
structure using explicit tagging.
|
||||
- Fixed a `UnicodeDecodeError` when trying to find the (optional) dependency
|
||||
OpenSSL on Python 2
|
||||
- Fixed `next_update` field of `crl.TbsCertList` to be optional
|
||||
- Added the `x509.Certificate.sha256_fingerprint` property
|
||||
- `x509.Certificate.ocsp_urls` and `x509.DistributionPoint.url` will now
|
||||
return `https://`, `ldap://` and `ldaps://` URLs in addition to `http://`.
|
||||
- Added CMS Attribute Protection definitions from RFC 6211
|
||||
- Added OIDs from RFC 6962
|
||||
|
||||
## 0.22.0
|
||||
|
||||
- Added `parser.peek()`
|
||||
- Implemented proper support for BER-encoded indefinite length strings of
|
||||
all kinds - `core.BitString`, `core.OctetString` and all of the `core`
|
||||
classes that are natively represented as Python unicode strings
|
||||
- Fixed a bug with encoding LDAP URLs in `x509.URI`
|
||||
- Correct `x509.DNSName` to allow a leading `.`, such as when used with
|
||||
`x509.NameConstraints`
|
||||
- Fixed an issue with dumping the parsed contents of `core.Any` when
|
||||
explicitly tagged
|
||||
- Custom `setup.py clean` now accepts the short `-a` flag for compatibility
|
||||
|
||||
## 0.21.1
|
||||
|
||||
- Fixed a regression where explicit tagging of a field containing a
|
||||
`core.Choice` would result in an incorrect header
|
||||
- Fixed a bug where an `IndexError` was being raised instead of a `ValueError`
|
||||
when a value was truncated to not include enough bytes for the header
|
||||
- Corrected the spec for the `value` field of `pkcs12.Attribute`
|
||||
- Added support for `2.16.840.1.113894.746875.1.1` OID to
|
||||
`pkcs12.AttributeType`
|
||||
|
||||
## 0.21.0
|
||||
|
||||
- Added `core.load()` for loading standard, universal types without knowing
|
||||
the spec beforehand
|
||||
- Added a `strict` keyword arg to the various `load()` methods and functions in
|
||||
`core` that checks for trailing data and raises a `ValueError` when found
|
||||
- Added `asn1crypto.parser` submodule with `emit()` and `parse()` functions for
|
||||
low-level integration
|
||||
- Added `asn1crypto.version` for version introspection without side-effects
|
||||
- Added `algos.DSASignature`
|
||||
- Fixed a bug with the `_header` attribute of explicitly-tagged values only
|
||||
containing the explicit tag header instead of both the explicit tag header
|
||||
and the encapsulated value header
|
||||
|
||||
## 0.20.0
|
||||
|
||||
- Added support for year 0
|
||||
- Added the OID for unique identifier to `x509.NameType`
|
||||
- Fixed a bug creating the native representation of a `core.BitString` with
|
||||
leading null bytes
|
||||
- Added a `.cast()` method to allow converting between different
|
||||
representations of the same data, e.g. `core.BitString` and
|
||||
`core.OctetBitString`
|
||||
|
||||
## 0.19.0
|
||||
|
||||
- Force `algos.DigestAlgorithm` to encoding `parameters` as `Null` when the
|
||||
`algorithm` is `sha1`, `sha224`, `sha256`, `sha384` or `sha512` per RFC 4055
|
||||
- Resolved an issue where a BER-encoded indefinite-length value could not be
|
||||
properly parsed when embedded inside of a `core.Sequence` or `core.Set`
|
||||
- Fix `x509.Name.build()` to properly handle dotted OID type values
|
||||
- `core.Choice` can now be constructed from a single-element `dict` or a
|
||||
two-element `tuple` to allow for better usability when constructing values
|
||||
from native Python values
|
||||
- All `core` objects can now be passed to `print()` without an exception being
  raised
|
||||
|
||||
## 0.18.5
|
||||
|
||||
- Don't fail importing if `ctypes` or `_ctypes` is not available
|
||||
|
||||
## 0.18.4
|
||||
|
||||
- `core.Sequence` will now raise an exception when an unknown field is provided
|
||||
- Prevent `UnicodeDecodeError` on Python 2 when calling
|
||||
`core.OctetString.debug()`
|
||||
- Corrected the default value for the `hash_algorithm` field of
|
||||
`tsp.ESSCertIDv2`
|
||||
- Fixed a bug constructing a `cms.SignedData` object
|
||||
- Ensure that specific RSA OIDs are always paired with `parameters` set to
|
||||
`core.Null`
|
||||
|
||||
## 0.18.3
|
||||
|
||||
- Fixed DER encoding of `core.BitString` when a `_map` is specified (i.e. a
|
||||
"named bit list") to omit trailing zero bits. This fixes compliance of
|
||||
various `x509` structures with RFC 5280.
|
||||
- Corrected a side effect in `keys.PrivateKeyInfo.wrap()` that would cause the
|
||||
original `keys.ECPrivateKey` structure to become corrupt
|
||||
- `core.IntegerOctetString` now correctly encodes the integer as an unsigned
|
||||
value when converting to bytes. Previously decoding was unsigned, but
|
||||
encoding was signed.
|
||||
- Fix `util.int_from_bytes()` on Python 2 to return `0` from an empty byte
|
||||
string
|
||||
|
||||
## 0.18.2
|
||||
|
||||
- Allow `_perf` submodule to be removed from source tree when embedding
|
||||
|
||||
## 0.18.1
|
||||
|
||||
- Fixed DER encoding of `core.Set` and `core.SetOf`
|
||||
- Fixed a bug in `x509.Name.build()` that could generate invalid DER encoding
|
||||
- Improved exception messages when parsing nested structures via the `.native`
|
||||
attribute
|
||||
- `algos.SignedDigestAlgorithm` now ensures the `parameters` are set to
|
||||
`Null` when `algorithm` is `sha224_rsa`, `sha256_rsa`, `sha384_rsa` or
|
||||
`sha512_rsa`, per RFC 4055
|
||||
- Corrected the definition of `pdf.AdobeTimestamp` to mark the
|
||||
`requires_auth` field as optional
|
||||
- Add support for the OID `1.2.840.113549.1.9.16.2.14` to
|
||||
`cms.CMSAttributeType`
|
||||
- Improve attribute support for `cms.AttributeCertificateV2`
|
||||
- Handle `cms.AttributeCertificateV2` when incorrectly tagged as
|
||||
`cms.AttributeCertificateV1` in `cms.CertificateChoices`
|
||||
|
||||
## 0.18.0
|
||||
|
||||
- Improved general parsing performance by 10-15%
|
||||
- Add support for Windows XP
|
||||
- Added `core.ObjectIdentifier.dotted` attribute to always return dotted
|
||||
integer unicode string
|
||||
- Added `core.ObjectIdentifier.map()` and `core.ObjectIdentifier.unmap()`
|
||||
class methods to map dotted integer unicode strings to user-friendly unicode
|
||||
strings and back
|
||||
- Added various Apple OIDs to `x509.KeyPurposeId`
|
||||
- Fixed a bug parsing nested indefinite-length-encoded values
|
||||
- Fixed a bug with `x509.Certificate.issuer_alt_name_value` if it is the first
|
||||
extension queried
|
||||
- `keys.PublicKeyInfo.bit_size` and `keys.PrivateKeyInfo.bit_size` values are
|
||||
now rounded up to the next closest multiple of 8
|
||||
|
||||
## 0.17.1
|
||||
|
||||
- Fix a bug in `x509.URI` parsing IRIs containing explicit port numbers on
|
||||
Python 3.x
|
||||
|
||||
## 0.17.0
|
||||
|
||||
- Added `x509.TrustedCertificate` for handling OpenSSL auxiliary certificate
|
||||
information appended after a certificate
|
||||
- Added `core.Concat` class for situations such as `x509.TrustedCertificate`
|
||||
- Allow "broken" X.509 certificates to use `core.IA5String` where an
|
||||
`x509.DirectoryString` should be used instead
|
||||
- Added `keys.PrivateKeyInfo.public_key_info` attribute
|
||||
- Added a bunch of OIDs to `x509.KeyPurposeId`
|
||||
|
||||
## 0.16.0
|
||||
|
||||
- Added DH key exchange structures: `algos.KeyExchangeAlgorithm`,
|
||||
`algos.KeyExchangeAlgorithmId` and `algos.DHParameters`.
|
||||
- Added DH public key support to `keys.PublicKeyInfo`,
|
||||
`keys.PublicKeyAlgorithm` and `keys.PublicKeyAlgorithmId`. New structures
|
||||
include `keys.DomainParameters` and `keys.ValidationParms`.
|
||||
|
||||
## 0.15.1
|
||||
|
||||
- Fixed `cms.CMSAttributes` to be a `core.SetOf` instead of `core.SequenceOf`
|
||||
- `cms.CMSAttribute` can now parse unknown attribute contrustruct without an
|
||||
exception being raised
|
||||
- `x509.PolicyMapping` now uses `x509.PolicyIdentifier` for field types
|
||||
- Fixed `pdf.RevocationInfoArchival` so that all fields are now of the type
|
||||
`core.SequenceOf` instead of a single value
|
||||
- Added support for the `name_distinguisher`, `telephone_number` and
|
||||
`organization_identifier` OIDs to `x509.Name`
|
||||
- Fixed `x509.Name.native` to not accidentally create nested lists when three
|
||||
of more values for a single type are part of the name
|
||||
- `x509.Name.human_friendly` now reverses the order of fields when the data
|
||||
in an `x509.Name` was encoded in most-specific to least-specific order, which
|
||||
is the opposite of the standard way of least-specific to most-specific.
|
||||
- `x509.NameType.human_friendly` no longer raises an exception when an
|
||||
unknown OID is encountered
|
||||
- Raise a `ValueError` when parsing a `core.Set` and an unknown field is
|
||||
encountered
|
||||
|
||||
## 0.15.0
|
||||
|
||||
- Added support for the TLS feature extension from RFC 7633
|
||||
- `x509.Name.build()` now accepts a keyword parameter `use_printable` to force
|
||||
string encoding to be `core.PrintableString` instead of `core.UTF8String`
|
||||
- Added the functions `util.uri_to_iri()` and `util.iri_to_uri()`
|
||||
- Changed `algos.SignedDigestAlgorithmId` to use the preferred OIDs when
|
||||
mapping a unicode string name to an OID. Previously there were multiple OIDs
|
||||
for some algorithms, and different OIDs would sometimes be selected due to
|
||||
the fact that the `_map` `dict` is not ordered.
|
||||
|
||||
## 0.14.1
|
||||
|
||||
- Fixed a bug generating `x509.Certificate.sha1_fingerprint` on Python 2
|
||||
|
||||
## 0.14.0
|
||||
|
||||
- Added the `x509.Certificate.sha1_fingerprint` attribute
|
||||
|
||||
## 0.13.0
|
||||
|
||||
- Backwards compatibility break: the native representation of some
|
||||
`algos.EncryptionAlgorithmId` values changed. `aes128` became `aes128_cbc`,
|
||||
`aes192` became `aes192_cbc` and `aes256` became `aes256_cbc`.
|
||||
- Added more OIDs to `algos.EncryptionAlgorithmId`
|
||||
- Added more OIDs to `cms.KeyEncryptionAlgorithmId`
|
||||
- `x509.Name.human_friendly` now properly supports multiple values per
|
||||
`x509.NameTypeAndValue` object
|
||||
- Added `ocsp.OCSPResponse.basic_ocsp_response` and
|
||||
`ocsp.OCSPResponse.response_data` properties
|
||||
- Added `algos.EncryptionAlgorithm.encryption_mode` property
|
||||
- Fixed a bug with parsing times containing timezone offsets in Python 3
|
||||
- The `attributes` field of `csr.CertificationRequestInfo` is now optional,
|
||||
for compatibility with other ASN.1 parsers
|
||||
|
||||
## 0.12.2
|
||||
|
||||
- Correct `core.Sequence.__setitem__()` so set `core.VOID` to an optional
|
||||
field when `None` is set
|
||||
|
||||
## 0.12.1
|
||||
|
||||
- Fixed a `unicode`/`bytes` bug with `x509.URI.dump()` on Python 2
|
||||
|
||||
## 0.12.0
|
||||
|
||||
- Backwards Compatibility Break: `core.NoValue` was renamed to `core.Void` and
|
||||
a singleton was added as `core.VOID`
|
||||
- 20-30% improvement in parsing performance
|
||||
- `core.Void` now implements `__nonzero__`
|
||||
- `core.Asn1Value.copy()` now performs a deep copy
|
||||
- All `core` value classes are now compatible with the `copy` module
|
||||
- `core.SequenceOf` and `core.SetOf` now implement `__contains__`
|
||||
- Added `x509.Name.__len__()`
|
||||
- Fixed a bug where `core.Choice.validate()` would not properly account for
|
||||
explicit tagging
|
||||
- `core.Choice.load()` now properly passes itself as the spec when parsing
|
||||
- `x509.Certificate.crl_distribution_points` no longer throws an exception if
|
||||
the `DistributionPoint` does not have a value for the `distribution_point`
|
||||
field
|
||||
|
||||
## 0.11.1
|
||||
|
||||
- Corrected `core.UTCTime` to interpret year <= 49 as 20xx and >= 50 as 19xx
|
||||
- `keys.PublicKeyInfo.hash_algo` can now handle DSA keys without parameters
|
||||
- Added `crl.CertificateList.sha256` and `crl.CertificateList.sha1`
|
||||
- Fixed `x509.Name.build()` to properly encode `country_name`, `serial_number`
|
||||
and `dn_qualifier` as `core.PrintableString` as specified in RFC 5280,
|
||||
instead of `core.UTF8String`
|
||||
|
||||
## 0.11.0
|
||||
|
||||
- Added Python 2.6 support
|
||||
- Added ability to compare primitive type objects
|
||||
- Implemented proper support for internationalized domains, URLs and email
|
||||
addresses in `x509.Certificate`
|
||||
- Comparing `x509.Name` and `x509.GeneralName` objects adheres to RFC 5280
|
||||
- `x509.Certificate.self_signed` and `x509.Certificate.self_issued` no longer
|
||||
require that certificate is for a CA
|
||||
- Fixed `x509.Certificate.valid_domains` to adhere to RFC 6125
|
||||
- Added `x509.Certificate.is_valid_domain_ip()`
|
||||
- Added `x509.Certificate.sha1` and `x509.Certificate.sha256`
|
||||
- Exposed `util.inet_ntop()` and `util.inet_pton()` for IP address encoding
|
||||
- Improved exception messages for improper types to include type's module name
|
||||
|
||||
## 0.10.1
|
||||
|
||||
- Fixed bug in `core.Sequence` affecting Python 2.7 and pypy
|
||||
|
||||
## 0.10.0
|
||||
|
||||
- Added PEM encoding/decoding functionality
|
||||
- `core.BitString` now uses item access instead of attributes for named bit
|
||||
access
|
||||
- `core.BitString.native` now uses a `set` of unicode strings when `_map` is
|
||||
present
|
||||
- Removed `core.Asn1Value.pprint()` method
|
||||
- Added `core.ParsableOctetString` class
|
||||
- Added `core.ParsableOctetBitString` class
|
||||
- Added `core.Asn1Value.copy()` method
|
||||
- Added `core.Asn1Value.debug()` method
|
||||
- Added `core.SequenceOf.append()` method
|
||||
- Added `core.Sequence.spec()` and `core.SequenceOf.spec()` methods
|
||||
- Added correct IP address parsing to `x509.GeneralName`
|
||||
- `x509.Name` and `x509.GeneralName` are now compared according to rules in
|
||||
RFC 5280
|
||||
- Added convenience attributes to:
|
||||
- `algos.SignedDigestAlgorithm`
|
||||
- `crl.CertificateList`
|
||||
- `crl.RevokedCertificate`
|
||||
- `keys.PublicKeyInfo`
|
||||
- `ocsp.OCSPRequest`
|
||||
- `ocsp.Request`
|
||||
- `ocsp.OCSPResponse`
|
||||
- `ocsp.SingleResponse`
|
||||
- `x509.Certificate`
|
||||
- `x509.Name`
|
||||
- Added `asn1crypto.util` module with the following items:
|
||||
- `int_to_bytes()`
|
||||
- `int_from_bytes()`
|
||||
- `timezone.utc`
|
||||
- Added `setup.py clean` command
|
||||
|
||||
## 0.9.0
|
||||
|
||||
- Initial release
|
@ -1,28 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
|
||||
|
||||
package_name = "asn1crypto"
|
||||
|
||||
other_packages = [
|
||||
"oscrypto",
|
||||
"certbuilder",
|
||||
"certvalidator",
|
||||
"crlbuilder",
|
||||
"csrbuilder",
|
||||
"ocspbuilder"
|
||||
]
|
||||
|
||||
task_keyword_args = []
|
||||
|
||||
requires_oscrypto = False
|
||||
has_tests_package = True
|
||||
|
||||
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
|
||||
build_root = os.path.abspath(os.path.join(package_root, '..'))
|
||||
|
||||
md_source_map = {}
|
||||
|
||||
definition_replacements = {}
|
@ -1,116 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import imp
|
||||
import sys
|
||||
import os
|
||||
|
||||
from . import build_root, package_name, package_root
|
||||
|
||||
if sys.version_info < (3,):
|
||||
getcwd = os.getcwdu
|
||||
else:
|
||||
getcwd = os.getcwd
|
||||
|
||||
|
||||
def _import_from(mod, path, mod_dir=None, allow_error=False):
|
||||
"""
|
||||
Imports a module from a specific path
|
||||
|
||||
:param mod:
|
||||
A unicode string of the module name
|
||||
|
||||
:param path:
|
||||
A unicode string to the directory containing the module
|
||||
|
||||
:param mod_dir:
|
||||
If the sub directory of "path" is different than the "mod" name,
|
||||
pass the sub directory as a unicode string
|
||||
|
||||
:param allow_error:
|
||||
If an ImportError should be raised when the module can't be imported
|
||||
|
||||
:return:
|
||||
None if not loaded, otherwise the module
|
||||
"""
|
||||
|
||||
if mod_dir is None:
|
||||
mod_dir = mod.replace('.', os.sep)
|
||||
|
||||
if not os.path.exists(path):
|
||||
return None
|
||||
|
||||
if not os.path.exists(os.path.join(path, mod_dir)) \
|
||||
and not os.path.exists(os.path.join(path, mod_dir + '.py')):
|
||||
return None
|
||||
|
||||
if os.sep in mod_dir:
|
||||
append, mod_dir = mod_dir.rsplit(os.sep, 1)
|
||||
path = os.path.join(path, append)
|
||||
|
||||
try:
|
||||
mod_info = imp.find_module(mod_dir, [path])
|
||||
return imp.load_module(mod, *mod_info)
|
||||
except ImportError:
|
||||
if allow_error:
|
||||
raise
|
||||
return None
|
||||
|
||||
|
||||
def _preload(require_oscrypto, print_info):
|
||||
"""
|
||||
Preloads asn1crypto and optionally oscrypto from a local source checkout,
|
||||
or from a normal install
|
||||
|
||||
:param require_oscrypto:
|
||||
A bool if oscrypto needs to be preloaded
|
||||
|
||||
:param print_info:
|
||||
A bool if info about asn1crypto and oscrypto should be printed
|
||||
"""
|
||||
|
||||
if print_info:
|
||||
print('Working dir: ' + getcwd())
|
||||
print('Python ' + sys.version.replace('\n', ''))
|
||||
|
||||
asn1crypto = None
|
||||
oscrypto = None
|
||||
|
||||
if require_oscrypto:
|
||||
# Some CI services don't use the package name for the dir
|
||||
if package_name == 'oscrypto':
|
||||
oscrypto_dir = package_root
|
||||
else:
|
||||
oscrypto_dir = os.path.join(build_root, 'oscrypto')
|
||||
oscrypto_tests = None
|
||||
if os.path.exists(oscrypto_dir):
|
||||
oscrypto_tests = _import_from('oscrypto_tests', oscrypto_dir, 'tests')
|
||||
if oscrypto_tests is None:
|
||||
import oscrypto_tests
|
||||
asn1crypto, oscrypto = oscrypto_tests.local_oscrypto()
|
||||
|
||||
else:
|
||||
if package_name == 'asn1crypto':
|
||||
asn1crypto_dir = package_root
|
||||
else:
|
||||
asn1crypto_dir = os.path.join(build_root, 'asn1crypto')
|
||||
if os.path.exists(asn1crypto_dir):
|
||||
asn1crypto = _import_from('asn1crypto', asn1crypto_dir)
|
||||
if asn1crypto is None:
|
||||
import asn1crypto
|
||||
|
||||
if print_info:
|
||||
print(
|
||||
'\nasn1crypto: %s, %s' % (
|
||||
asn1crypto.__version__,
|
||||
os.path.dirname(asn1crypto.__file__)
|
||||
)
|
||||
)
|
||||
if require_oscrypto:
|
||||
print(
|
||||
'oscrypto: %s backend, %s, %s' % (
|
||||
oscrypto.backend(),
|
||||
oscrypto.__version__,
|
||||
os.path.dirname(oscrypto.__file__)
|
||||
)
|
||||
)
|
@ -1,205 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
This file was originally derived from
|
||||
https://github.com/pypa/pip/blob/3e713708088aedb1cde32f3c94333d6e29aaf86e/src/pip/_internal/pep425tags.py
|
||||
|
||||
The following license covers that code:
|
||||
|
||||
Copyright (c) 2008-2018 The pip developers (see AUTHORS.txt file)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
import os
|
||||
import ctypes
|
||||
import re
|
||||
import platform
|
||||
|
||||
if sys.version_info >= (2, 7):
|
||||
import sysconfig
|
||||
|
||||
if sys.version_info < (3,):
|
||||
str_cls = unicode # noqa
|
||||
else:
|
||||
str_cls = str
|
||||
|
||||
|
||||
def _pep425_implementation():
|
||||
"""
|
||||
:return:
|
||||
A 2 character unicode string of the implementation - 'cp' for cpython
|
||||
or 'pp' for PyPy
|
||||
"""
|
||||
|
||||
return 'pp' if hasattr(sys, 'pypy_version_info') else 'cp'
|
||||
|
||||
|
||||
def _pep425_version():
|
||||
"""
|
||||
:return:
|
||||
A tuple of integers representing the Python version number
|
||||
"""
|
||||
|
||||
if hasattr(sys, 'pypy_version_info'):
|
||||
return (sys.version_info[0], sys.pypy_version_info.major,
|
||||
sys.pypy_version_info.minor)
|
||||
else:
|
||||
return (sys.version_info[0], sys.version_info[1])
|
||||
|
||||
|
||||
def _pep425_supports_manylinux():
|
||||
"""
|
||||
:return:
|
||||
A boolean indicating if the machine can use manylinux1 packages
|
||||
"""
|
||||
|
||||
try:
|
||||
import _manylinux
|
||||
return bool(_manylinux.manylinux1_compatible)
|
||||
except (ImportError, AttributeError):
|
||||
pass
|
||||
|
||||
# Check for glibc 2.5
|
||||
try:
|
||||
proc = ctypes.CDLL(None)
|
||||
gnu_get_libc_version = proc.gnu_get_libc_version
|
||||
gnu_get_libc_version.restype = ctypes.c_char_p
|
||||
|
||||
ver = gnu_get_libc_version()
|
||||
if not isinstance(ver, str_cls):
|
||||
ver = ver.decode('ascii')
|
||||
match = re.match(r'(\d+)\.(\d+)', ver)
|
||||
return match and match.group(1) == '2' and int(match.group(2)) >= 5
|
||||
|
||||
except (AttributeError):
|
||||
return False
|
||||
|
||||
|
||||
def _pep425_get_abi():
|
||||
"""
|
||||
:return:
|
||||
A unicode string of the system abi. Will be something like: "cp27m",
|
||||
"cp33m", etc.
|
||||
"""
|
||||
|
||||
try:
|
||||
soabi = sysconfig.get_config_var('SOABI')
|
||||
if soabi:
|
||||
if soabi.startswith('cpython-'):
|
||||
return 'cp%s' % soabi.split('-')[1]
|
||||
return soabi.replace('.', '_').replace('-', '_')
|
||||
except (IOError, NameError):
|
||||
pass
|
||||
|
||||
impl = _pep425_implementation()
|
||||
suffix = ''
|
||||
if impl == 'cp':
|
||||
suffix += 'm'
|
||||
if sys.maxunicode == 0x10ffff and sys.version_info < (3, 3):
|
||||
suffix += 'u'
|
||||
return '%s%s%s' % (impl, ''.join(map(str_cls, _pep425_version())), suffix)
|
||||
|
||||
|
||||
def _pep425tags():
|
||||
"""
|
||||
:return:
|
||||
A list of 3-element tuples with unicode strings or None:
|
||||
[0] implementation tag - cp33, pp27, cp26, py2, py2.py3
|
||||
[1] abi tag - cp26m, None
|
||||
[2] arch tag - linux_x86_64, macosx_10_10_x85_64, etc
|
||||
"""
|
||||
|
||||
tags = []
|
||||
|
||||
versions = []
|
||||
version_info = _pep425_version()
|
||||
major = version_info[:-1]
|
||||
for minor in range(version_info[-1], -1, -1):
|
||||
versions.append(''.join(map(str, major + (minor,))))
|
||||
|
||||
impl = _pep425_implementation()
|
||||
|
||||
abis = []
|
||||
abi = _pep425_get_abi()
|
||||
if abi:
|
||||
abis.append(abi)
|
||||
abi3 = _pep425_implementation() == 'cp' and sys.version_info >= (3,)
|
||||
if abi3:
|
||||
abis.append('abi3')
|
||||
abis.append('none')
|
||||
|
||||
if sys.platform == 'darwin':
|
||||
plat_ver = platform.mac_ver()
|
||||
ver_parts = plat_ver[0].split('.')
|
||||
minor = int(ver_parts[1])
|
||||
arch = plat_ver[2]
|
||||
if sys.maxsize == 2147483647:
|
||||
arch = 'i386'
|
||||
arches = []
|
||||
while minor > 5:
|
||||
arches.append('macosx_10_%s_%s' % (minor, arch))
|
||||
arches.append('macosx_10_%s_intel' % (minor,))
|
||||
arches.append('macosx_10_%s_universal' % (minor,))
|
||||
minor -= 1
|
||||
else:
|
||||
if sys.platform == 'win32':
|
||||
if 'amd64' in sys.version.lower():
|
||||
arches = ['win_amd64']
|
||||
else:
|
||||
arches = [sys.platform]
|
||||
elif hasattr(os, 'uname'):
|
||||
(plat, _, _, _, machine) = os.uname()
|
||||
plat = plat.lower().replace('/', '')
|
||||
machine.replace(' ', '_').replace('/', '_')
|
||||
if plat == 'linux' and sys.maxsize == 2147483647 and 'arm' not in machine:
|
||||
machine = 'i686'
|
||||
arch = '%s_%s' % (plat, machine)
|
||||
if _pep425_supports_manylinux():
|
||||
arches = [arch.replace('linux', 'manylinux1'), arch]
|
||||
else:
|
||||
arches = [arch]
|
||||
|
||||
for abi in abis:
|
||||
for arch in arches:
|
||||
tags.append(('%s%s' % (impl, versions[0]), abi, arch))
|
||||
|
||||
if abi3:
|
||||
for version in versions[1:]:
|
||||
for arch in arches:
|
||||
tags.append(('%s%s' % (impl, version), 'abi3', arch))
|
||||
|
||||
for arch in arches:
|
||||
tags.append(('py%s' % (versions[0][0]), 'none', arch))
|
||||
|
||||
tags.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
|
||||
tags.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
|
||||
|
||||
for i, version in enumerate(versions):
|
||||
tags.append(('py%s' % (version,), 'none', 'any'))
|
||||
if i == 0:
|
||||
tags.append(('py%s' % (version[0]), 'none', 'any'))
|
||||
|
||||
tags.append(('py2.py3', 'none', 'any'))
|
||||
|
||||
return tags
|
@ -1,163 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import ast
|
||||
import _ast
|
||||
import os
|
||||
import sys
|
||||
|
||||
from . import package_root, task_keyword_args
|
||||
from ._import import _import_from
|
||||
|
||||
|
||||
if sys.version_info < (3,):
|
||||
byte_cls = str
|
||||
else:
|
||||
byte_cls = bytes
|
||||
|
||||
|
||||
def _list_tasks():
|
||||
"""
|
||||
Fetches a list of all valid tasks that may be run, and the args they
|
||||
accept. Does not actually import the task module to prevent errors if a
|
||||
user does not have the dependencies installed for every task.
|
||||
|
||||
:return:
|
||||
A list of 2-element tuples:
|
||||
0: a unicode string of the task name
|
||||
1: a list of dicts containing the parameter definitions
|
||||
"""
|
||||
|
||||
out = []
|
||||
dev_path = os.path.join(package_root, 'dev')
|
||||
for fname in sorted(os.listdir(dev_path)):
|
||||
if fname.startswith('.') or fname.startswith('_'):
|
||||
continue
|
||||
if not fname.endswith('.py'):
|
||||
continue
|
||||
name = fname[:-3]
|
||||
args = ()
|
||||
|
||||
full_path = os.path.join(package_root, 'dev', fname)
|
||||
with open(full_path, 'rb') as f:
|
||||
full_code = f.read()
|
||||
if sys.version_info >= (3,):
|
||||
full_code = full_code.decode('utf-8')
|
||||
|
||||
task_node = ast.parse(full_code, filename=full_path)
|
||||
for node in ast.iter_child_nodes(task_node):
|
||||
if isinstance(node, _ast.Assign):
|
||||
if len(node.targets) == 1 \
|
||||
and isinstance(node.targets[0], _ast.Name) \
|
||||
and node.targets[0].id == 'run_args':
|
||||
args = ast.literal_eval(node.value)
|
||||
break
|
||||
|
||||
out.append((name, args))
|
||||
return out
|
||||
|
||||
|
||||
def show_usage():
|
||||
"""
|
||||
Prints to stderr the valid options for invoking tasks
|
||||
"""
|
||||
|
||||
valid_tasks = []
|
||||
for task in _list_tasks():
|
||||
usage = task[0]
|
||||
for run_arg in task[1]:
|
||||
usage += ' '
|
||||
name = run_arg.get('name', '')
|
||||
if run_arg.get('required', False):
|
||||
usage += '{%s}' % name
|
||||
else:
|
||||
usage += '[%s]' % name
|
||||
valid_tasks.append(usage)
|
||||
|
||||
out = 'Usage: run.py'
|
||||
for karg in task_keyword_args:
|
||||
out += ' [%s=%s]' % (karg['name'], karg['placeholder'])
|
||||
out += ' (%s)' % ' | '.join(valid_tasks)
|
||||
|
||||
print(out, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _get_arg(num):
|
||||
"""
|
||||
:return:
|
||||
A unicode string of the requested command line arg
|
||||
"""
|
||||
|
||||
if len(sys.argv) < num + 1:
|
||||
return None
|
||||
arg = sys.argv[num]
|
||||
if isinstance(arg, byte_cls):
|
||||
arg = arg.decode('utf-8')
|
||||
return arg
|
||||
|
||||
|
||||
def run_task():
|
||||
"""
|
||||
Parses the command line args, invoking the requested task
|
||||
"""
|
||||
|
||||
arg_num = 1
|
||||
task = None
|
||||
args = []
|
||||
kwargs = {}
|
||||
|
||||
# We look for the task name, processing any global task keyword args
|
||||
# by setting the appropriate env var
|
||||
while True:
|
||||
val = _get_arg(arg_num)
|
||||
if val is None:
|
||||
break
|
||||
|
||||
next_arg = False
|
||||
for karg in task_keyword_args:
|
||||
if val.startswith(karg['name'] + '='):
|
||||
os.environ[karg['env_var']] = val[len(karg['name']) + 1:]
|
||||
next_arg = True
|
||||
break
|
||||
|
||||
if next_arg:
|
||||
arg_num += 1
|
||||
continue
|
||||
|
||||
task = val
|
||||
break
|
||||
|
||||
if task is None:
|
||||
show_usage()
|
||||
|
||||
task_mod = _import_from('dev.%s' % task, package_root, allow_error=True)
|
||||
if task_mod is None:
|
||||
show_usage()
|
||||
|
||||
run_args = task_mod.__dict__.get('run_args', [])
|
||||
max_args = arg_num + 1 + len(run_args)
|
||||
|
||||
if len(sys.argv) > max_args:
|
||||
show_usage()
|
||||
|
||||
for i, run_arg in enumerate(run_args):
|
||||
val = _get_arg(arg_num + 1 + i)
|
||||
if val is None:
|
||||
if run_arg.get('required', False):
|
||||
show_usage()
|
||||
break
|
||||
|
||||
if run_arg.get('cast') == 'int' and val.isdigit():
|
||||
val = int(val)
|
||||
|
||||
kwarg = run_arg.get('kwarg')
|
||||
if kwarg:
|
||||
kwargs[kwarg] = val
|
||||
else:
|
||||
args.append(val)
|
||||
|
||||
run = task_mod.__dict__.get('run')
|
||||
|
||||
result = run(*args, **kwargs)
|
||||
sys.exit(int(not result))
|
@ -1,89 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import imp
|
||||
import os
|
||||
import tarfile
|
||||
import zipfile
|
||||
|
||||
import setuptools.sandbox
|
||||
|
||||
from . import package_root, package_name, has_tests_package
|
||||
|
||||
|
||||
def _list_zip(filename):
|
||||
"""
|
||||
Prints all of the files in a .zip file
|
||||
"""
|
||||
|
||||
zf = zipfile.ZipFile(filename, 'r')
|
||||
for name in zf.namelist():
|
||||
print(' %s' % name)
|
||||
|
||||
|
||||
def _list_tgz(filename):
|
||||
"""
|
||||
Prints all of the files in a .tar.gz file
|
||||
"""
|
||||
|
||||
tf = tarfile.open(filename, 'r:gz')
|
||||
for name in tf.getnames():
|
||||
print(' %s' % name)
|
||||
|
||||
|
||||
def run():
|
||||
"""
|
||||
Creates a sdist .tar.gz and a bdist_wheel --univeral .whl
|
||||
|
||||
:return:
|
||||
A bool - if the packaging process was successful
|
||||
"""
|
||||
|
||||
setup = os.path.join(package_root, 'setup.py')
|
||||
tests_root = os.path.join(package_root, 'tests')
|
||||
tests_setup = os.path.join(tests_root, 'setup.py')
|
||||
|
||||
# Trying to call setuptools.sandbox.run_setup(setup, ['--version'])
|
||||
# resulted in a segfault, so we do this instead
|
||||
module_info = imp.find_module('version', [os.path.join(package_root, package_name)])
|
||||
version_mod = imp.load_module('%s.version' % package_name, *module_info)
|
||||
|
||||
pkg_name_info = (package_name, version_mod.__version__)
|
||||
print('Building %s-%s' % pkg_name_info)
|
||||
|
||||
sdist = '%s-%s.tar.gz' % pkg_name_info
|
||||
whl = '%s-%s-py2.py3-none-any.whl' % pkg_name_info
|
||||
setuptools.sandbox.run_setup(setup, ['-q', 'sdist'])
|
||||
print(' - created %s' % sdist)
|
||||
_list_tgz(os.path.join(package_root, 'dist', sdist))
|
||||
setuptools.sandbox.run_setup(setup, ['-q', 'bdist_wheel', '--universal'])
|
||||
print(' - created %s' % whl)
|
||||
_list_zip(os.path.join(package_root, 'dist', whl))
|
||||
setuptools.sandbox.run_setup(setup, ['-q', 'clean'])
|
||||
|
||||
if has_tests_package:
|
||||
print('Building %s_tests-%s' % (package_name, version_mod.__version__))
|
||||
|
||||
tests_sdist = '%s_tests-%s.tar.gz' % pkg_name_info
|
||||
tests_whl = '%s_tests-%s-py2.py3-none-any.whl' % pkg_name_info
|
||||
setuptools.sandbox.run_setup(tests_setup, ['-q', 'sdist'])
|
||||
print(' - created %s' % tests_sdist)
|
||||
_list_tgz(os.path.join(tests_root, 'dist', tests_sdist))
|
||||
setuptools.sandbox.run_setup(tests_setup, ['-q', 'bdist_wheel', '--universal'])
|
||||
print(' - created %s' % tests_whl)
|
||||
_list_zip(os.path.join(tests_root, 'dist', tests_whl))
|
||||
setuptools.sandbox.run_setup(tests_setup, ['-q', 'clean'])
|
||||
|
||||
dist_dir = os.path.join(package_root, 'dist')
|
||||
tests_dist_dir = os.path.join(tests_root, 'dist')
|
||||
os.rename(
|
||||
os.path.join(tests_dist_dir, tests_sdist),
|
||||
os.path.join(dist_dir, tests_sdist)
|
||||
)
|
||||
os.rename(
|
||||
os.path.join(tests_dist_dir, tests_whl),
|
||||
os.path.join(dist_dir, tests_whl)
|
||||
)
|
||||
os.rmdir(tests_dist_dir)
|
||||
|
||||
return True
|
@ -1,73 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
|
||||
run_args = [
|
||||
{
|
||||
'name': 'cffi',
|
||||
'kwarg': 'cffi',
|
||||
},
|
||||
{
|
||||
'name': 'openssl',
|
||||
'kwarg': 'openssl',
|
||||
},
|
||||
{
|
||||
'name': 'winlegacy',
|
||||
'kwarg': 'winlegacy',
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
def _write_env(env, key, value):
|
||||
sys.stdout.write("%s: %s\n" % (key, value))
|
||||
sys.stdout.flush()
|
||||
if sys.version_info < (3,):
|
||||
env[key.encode('utf-8')] = value.encode('utf-8')
|
||||
else:
|
||||
env[key] = value
|
||||
|
||||
|
||||
def run(**_):
|
||||
"""
|
||||
Runs CI, setting various env vars
|
||||
|
||||
:return:
|
||||
A bool - if the CI ran successfully
|
||||
"""
|
||||
|
||||
env = os.environ.copy()
|
||||
options = set(sys.argv[2:])
|
||||
|
||||
newline = False
|
||||
if 'cffi' not in options:
|
||||
_write_env(env, 'OSCRYPTO_USE_CTYPES', 'true')
|
||||
newline = True
|
||||
if 'openssl' in options and sys.platform == 'darwin':
|
||||
mac_version_info = tuple(map(int, platform.mac_ver()[0].split('.')[:2]))
|
||||
if mac_version_info < (10, 15):
|
||||
_write_env(env, 'OSCRYPTO_USE_OPENSSL', '/usr/lib/libcrypto.dylib,/usr/lib/libssl.dylib')
|
||||
else:
|
||||
_write_env(env, 'OSCRYPTO_USE_OPENSSL', '/usr/lib/libcrypto.35.dylib,/usr/lib/libssl.35.dylib')
|
||||
newline = True
|
||||
if 'winlegacy' in options:
|
||||
_write_env(env, 'OSCRYPTO_USE_WINLEGACY', 'true')
|
||||
newline = True
|
||||
|
||||
if newline:
|
||||
sys.stdout.write("\n")
|
||||
|
||||
proc = subprocess.Popen(
|
||||
[
|
||||
sys.executable,
|
||||
'run.py',
|
||||
'ci',
|
||||
],
|
||||
env=env
|
||||
)
|
||||
proc.communicate()
|
||||
return proc.returncode == 0
|
@ -1,57 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import site
|
||||
import sys
|
||||
|
||||
from . import build_root, requires_oscrypto
|
||||
from ._import import _preload
|
||||
|
||||
|
||||
deps_dir = os.path.join(build_root, 'modularcrypto-deps')
|
||||
if os.path.exists(deps_dir):
|
||||
site.addsitedir(deps_dir)
|
||||
|
||||
if sys.version_info[0:2] not in [(2, 6), (3, 2)]:
|
||||
from .lint import run as run_lint
|
||||
else:
|
||||
run_lint = None
|
||||
|
||||
if sys.version_info[0:2] != (3, 2):
|
||||
from .coverage import run as run_coverage
|
||||
from .coverage import coverage
|
||||
run_tests = None
|
||||
|
||||
else:
|
||||
from .tests import run as run_tests
|
||||
run_coverage = None
|
||||
|
||||
|
||||
def run():
|
||||
"""
|
||||
Runs the linter and tests
|
||||
|
||||
:return:
|
||||
A bool - if the linter and tests ran successfully
|
||||
"""
|
||||
|
||||
_preload(requires_oscrypto, True)
|
||||
|
||||
if run_lint:
|
||||
print('')
|
||||
lint_result = run_lint()
|
||||
else:
|
||||
lint_result = True
|
||||
|
||||
if run_coverage:
|
||||
print('\nRunning tests (via coverage.py %s)' % coverage.__version__)
|
||||
sys.stdout.flush()
|
||||
tests_result = run_coverage(ci=True)
|
||||
else:
|
||||
print('\nRunning tests')
|
||||
sys.stdout.flush()
|
||||
tests_result = run_tests(ci=True)
|
||||
sys.stdout.flush()
|
||||
|
||||
return lint_result and tests_result
|
@ -1,5 +0,0 @@
|
||||
{
|
||||
"slug": "wbond/asn1crypto",
|
||||
"token": "98876f5e-6517-4def-85ce-c6e508eee35a",
|
||||
"disabled": true
|
||||
}
|
@ -1,677 +0,0 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals, division, absolute_import, print_function
|
||||
|
||||
import cgi
|
||||
import codecs
|
||||
import coverage
|
||||
import imp
|
||||
import json
|
||||
import os
|
||||
import unittest
|
||||
import re
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
import platform as _plat
|
||||
import subprocess
|
||||
from fnmatch import fnmatch
|
||||
|
||||
from . import package_name, package_root, other_packages
|
||||
|
||||
if sys.version_info < (3,):
|
||||
str_cls = unicode # noqa
|
||||
from urllib2 import URLError
|
||||
from urllib import urlencode
|
||||
from io import open
|
||||
else:
|
||||
str_cls = str
|
||||
from urllib.error import URLError
|
||||
from urllib.parse import urlencode
|
||||
|
||||
if sys.version_info < (3, 7):
|
||||
Pattern = re._pattern_type
|
||||
else:
|
||||
Pattern = re.Pattern
|
||||
|
||||
|
||||
def run(ci=False):
    """
    Runs the test suite while measuring coverage.

    :param ci:
        If coverage is being run in a CI environment - this triggers trying to
        run the tests for the rest of modularcrypto and uploading coverage data

    :return:
        A bool - if the tests ran successfully
    """

    # Start each run from a clean slate so a stale XML report is never uploaded
    xml_report_path = os.path.join(package_root, 'coverage.xml')
    if os.path.exists(xml_report_path):
        os.unlink(xml_report_path)

    cov = coverage.Coverage(include='%s/*.py' % package_name)
    cov.start()

    from .tests import run as run_tests
    passed = run_tests(ci=ci)
    print()

    if ci:
        # In CI, also run the sibling modularcrypto packages' tests so their
        # coverage of this package is measured too
        loader = unittest.TestLoader()
        suite = unittest.TestSuite()
        for other_package in other_packages:
            for test_class in _load_package_tests(other_package):
                suite.addTest(loader.loadTestsFromTestCase(test_class))

        if suite.countTestCases() > 0:
            print('Running tests from other modularcrypto packages')
            sys.stdout.flush()
            runner = unittest.TextTestRunner(stream=sys.stdout, verbosity=1)
            passed = runner.run(suite).wasSuccessful() and passed
            print()
            sys.stdout.flush()

    cov.stop()
    cov.save()

    cov.report(show_missing=False)
    print()
    sys.stdout.flush()
    if ci:
        cov.xml_report()

    # Only submit when everything passed and a report actually exists
    if ci and passed and os.path.exists(xml_report_path):
        _codecov_submit()
        print()

    return passed
|
||||
|
||||
|
||||
def _load_package_tests(name):
|
||||
"""
|
||||
Load the test classes from another modularcrypto package
|
||||
|
||||
:param name:
|
||||
A unicode string of the other package name
|
||||
|
||||
:return:
|
||||
A list of unittest.TestCase classes of the tests for the package
|
||||
"""
|
||||
|
||||
package_dir = os.path.join('..', name)
|
||||
if not os.path.exists(package_dir):
|
||||
return []
|
||||
|
||||
tests_module_info = imp.find_module('tests', [package_dir])
|
||||
tests_module = imp.load_module('%s.tests' % name, *tests_module_info)
|
||||
return tests_module.test_classes()
|
||||
|
||||
|
||||
def _env_info():
    """
    Determines which environment the tests are running in

    :return:
        A two-element tuple of unicode strings. The first is the name of the
        environment, the second the root of the repo. The environment name
        will be one of: "ci-travis", "ci-circle", "ci-appveyor",
        "ci-github-actions", "local"
    """

    env = os.getenv

    # Note: AppVeyor capitalizes its booleans as "True", the others use "true"
    if env('CI') == 'true' and env('TRAVIS') == 'true':
        return ('ci-travis', env('TRAVIS_BUILD_DIR'))

    if env('CI') == 'True' and env('APPVEYOR') == 'True':
        return ('ci-appveyor', env('APPVEYOR_BUILD_FOLDER'))

    if env('CI') == 'true' and env('CIRCLECI') == 'true':
        # CircleCI exposes no build-dir variable, so use the working directory
        cwd = os.getcwdu() if sys.version_info < (3,) else os.getcwd()
        return ('ci-circle', cwd)

    if env('GITHUB_ACTIONS') == 'true':
        return ('ci-github-actions', env('GITHUB_WORKSPACE'))

    return ('local', package_root)
|
||||
|
||||
|
||||
def _codecov_submit():
    """
    Submits the coverage.xml report plus environment metadata to codecov.io.

    Reads dev/codecov.json from the repo root for the repo slug/token and a
    "disabled" kill switch; returns without submitting if that file is
    unreadable or submission is disabled.
    """

    env_name, root = _env_info()

    # dev/codecov.json supplies the slug/token and an opt-out flag
    try:
        with open(os.path.join(root, 'dev/codecov.json'), 'rb') as f:
            json_data = json.loads(f.read().decode('utf-8'))
    except (OSError, ValueError, UnicodeDecodeError, KeyError):
        print('error reading codecov.json')
        return

    if json_data.get('disabled'):
        return

    # Build the codecov.io query params from whichever CI environment this is
    if env_name == 'ci-travis':
        # http://docs.travis-ci.com/user/environment-variables/#Default-Environment-Variables
        build_url = 'https://travis-ci.org/%s/jobs/%s' % (os.getenv('TRAVIS_REPO_SLUG'), os.getenv('TRAVIS_JOB_ID'))
        query = {
            'service': 'travis',
            'branch': os.getenv('TRAVIS_BRANCH'),
            'build': os.getenv('TRAVIS_JOB_NUMBER'),
            'pr': os.getenv('TRAVIS_PULL_REQUEST'),
            'job': os.getenv('TRAVIS_JOB_ID'),
            'tag': os.getenv('TRAVIS_TAG'),
            'slug': os.getenv('TRAVIS_REPO_SLUG'),
            'commit': os.getenv('TRAVIS_COMMIT'),
            'build_url': build_url,
        }

    elif env_name == 'ci-appveyor':
        # http://www.appveyor.com/docs/environment-variables
        build_url = 'https://ci.appveyor.com/project/%s/build/%s' % (
            os.getenv('APPVEYOR_REPO_NAME'),
            os.getenv('APPVEYOR_BUILD_VERSION')
        )
        query = {
            'service': "appveyor",
            'branch': os.getenv('APPVEYOR_REPO_BRANCH'),
            'build': os.getenv('APPVEYOR_JOB_ID'),
            'pr': os.getenv('APPVEYOR_PULL_REQUEST_NUMBER'),
            'job': '/'.join((
                os.getenv('APPVEYOR_ACCOUNT_NAME'),
                os.getenv('APPVEYOR_PROJECT_SLUG'),
                os.getenv('APPVEYOR_BUILD_VERSION')
            )),
            'tag': os.getenv('APPVEYOR_REPO_TAG_NAME'),
            'slug': os.getenv('APPVEYOR_REPO_NAME'),
            'commit': os.getenv('APPVEYOR_REPO_COMMIT'),
            'build_url': build_url,
        }

    elif env_name == 'ci-circle':
        # https://circleci.com/docs/environment-variables
        query = {
            'service': 'circleci',
            'branch': os.getenv('CIRCLE_BRANCH'),
            'build': os.getenv('CIRCLE_BUILD_NUM'),
            'pr': os.getenv('CIRCLE_PR_NUMBER'),
            'job': os.getenv('CIRCLE_BUILD_NUM') + "." + os.getenv('CIRCLE_NODE_INDEX'),
            'tag': os.getenv('CIRCLE_TAG'),
            'slug': os.getenv('CIRCLE_PROJECT_USERNAME') + "/" + os.getenv('CIRCLE_PROJECT_REPONAME'),
            'commit': os.getenv('CIRCLE_SHA1'),
            'build_url': os.getenv('CIRCLE_BUILD_URL'),
        }

    elif env_name == 'ci-github-actions':
        # GITHUB_REF looks like "refs/heads/<branch>" or "refs/tags/<tag>"
        branch = ''
        tag = ''
        ref = os.getenv('GITHUB_REF', '')
        if ref.startswith('refs/tags/'):
            tag = ref[10:]
        elif ref.startswith('refs/heads/'):
            branch = ref[11:]

        impl = _plat.python_implementation()
        major, minor = _plat.python_version_tuple()[0:2]
        build_name = '%s %s %s.%s' % (_platform_name(), impl, major, minor)

        query = {
            'service': 'custom',
            'token': json_data['token'],
            'branch': branch,
            'tag': tag,
            'slug': os.getenv('GITHUB_REPOSITORY'),
            'commit': os.getenv('GITHUB_SHA'),
            'build_url': 'https://github.com/wbond/oscrypto/commit/%s/checks' % os.getenv('GITHUB_SHA'),
            'name': 'GitHub Actions %s on %s' % (build_name, os.getenv('RUNNER_OS'))
        }

    else:
        # Local run: refuse to submit unless the working tree is a clean git
        # checkout, then derive branch/commit/tag from git itself
        if not os.path.exists(os.path.join(root, '.git')):
            print('git repository not found, not submitting coverage data')
            return
        git_status = _git_command(['status', '--porcelain'], root)
        if git_status != '':
            print('git repository has uncommitted changes, not submitting coverage data')
            return

        branch = _git_command(['rev-parse', '--abbrev-ref', 'HEAD'], root)
        commit = _git_command(['rev-parse', '--verify', 'HEAD'], root)
        tag = _git_command(['name-rev', '--tags', '--name-only', commit], root)
        impl = _plat.python_implementation()
        major, minor = _plat.python_version_tuple()[0:2]
        build_name = '%s %s %s.%s' % (_platform_name(), impl, major, minor)
        query = {
            'branch': branch,
            'commit': commit,
            'slug': json_data['slug'],
            'token': json_data['token'],
            'build': build_name,
        }
        # git name-rev prints "undefined" when the commit has no tag
        if tag != 'undefined':
            query['tag'] = tag

    # Assemble the multi-section upload payload; the "<<<<<< ..." markers are
    # codecov's section delimiters and must be preserved exactly
    payload = 'PLATFORM=%s\n' % _platform_name()
    payload += 'PYTHON_VERSION=%s %s\n' % (_plat.python_version(), _plat.python_implementation())
    if 'oscrypto' in sys.modules:
        payload += 'OSCRYPTO_BACKEND=%s\n' % sys.modules['oscrypto'].backend()
    payload += '<<<<<< ENV\n'

    # The file listing lets codecov map report paths to repo paths
    for path in _list_files(root):
        payload += path + '\n'
    payload += '<<<<<< network\n'

    payload += '# path=coverage.xml\n'
    with open(os.path.join(root, 'coverage.xml'), 'r', encoding='utf-8') as f:
        payload += f.read() + '\n'
    payload += '<<<<<< EOF\n'

    url = 'https://codecov.io/upload/v4'
    headers = {
        'Accept': 'text/plain'
    }
    # Drop empty/None params so they are not sent as literal empty strings
    filtered_query = {}
    for key in query:
        value = query[key]
        if value == '' or value is None:
            continue
        filtered_query[key] = value

    print('Submitting coverage info to codecov.io')
    info = _do_request(
        'POST',
        url,
        headers,
        query_params=filtered_query
    )

    # The v4 endpoint replies with two whitespace-separated URLs; the second
    # one is the pre-signed S3 upload target
    encoding = info[1] or 'utf-8'
    text = info[2].decode(encoding).strip()
    parts = text.split()
    upload_url = parts[1]

    headers = {
        'Content-Type': 'text/plain',
        'x-amz-acl': 'public-read',
        'x-amz-storage-class': 'REDUCED_REDUNDANCY'
    }

    print('Uploading coverage data to codecov.io S3 bucket')
    _do_request(
        'PUT',
        upload_url,
        headers,
        data=payload.encode('utf-8')
    )
|
||||
|
||||
|
||||
def _git_command(params, cwd):
    """
    Executes a git command, returning the output

    :param params:
        A list of the parameters to pass to git

    :param cwd:
        The working directory to execute git in

    :raises:
        OSError - when git exits with a non-zero status; the combined output
        is attached as the ``stdout`` attribute of the exception

    :return:
        A unicode string of the command's combined stdout/stderr, stripped
    """

    proc = subprocess.Popen(
        ['git'] + params,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=cwd
    )
    output, _ = proc.communicate()
    exit_code = proc.wait()

    if exit_code != 0:
        error = OSError('git exit code was non-zero')
        error.stdout = output
        raise error

    return output.decode('utf-8').strip()
|
||||
|
||||
|
||||
def _parse_env_var_file(data):
|
||||
"""
|
||||
Parses a basic VAR="value data" file contents into a dict
|
||||
|
||||
:param data:
|
||||
A unicode string of the file data
|
||||
|
||||
:return:
|
||||
A dict of parsed name/value data
|
||||
"""
|
||||
|
||||
output = {}
|
||||
for line in data.splitlines():
|
||||
line = line.strip()
|
||||
if not line or '=' not in line:
|
||||
continue
|
||||
parts = line.split('=')
|
||||
if len(parts) != 2:
|
||||
continue
|
||||
name = parts[0]
|
||||
value = parts[1]
|
||||
if len(value) > 1:
|
||||
if value[0] == '"' and value[-1] == '"':
|
||||
value = value[1:-1]
|
||||
output[name] = value
|
||||
return output
|
||||
|
||||
|
||||
def _platform_name():
    """
    Returns information about the current operating system and version

    :raises:
        ValueError - on Linux when /etc/os-release or /etc/lsb-release exists
        but contains no usable name/version fields

    :return:
        A unicode string containing the OS name and version
    """

    if sys.platform == 'darwin':
        version = _plat.mac_ver()[0]
        # 10.12 was the release where Apple renamed "OS X" to "macOS"
        if tuple(map(int, version.split('.'))) < (10, 12):
            name = 'OS X'
        else:
            name = 'macOS'
        return '%s %s' % (name, version)

    elif sys.platform == 'win32':
        # Previously computed sys.getwindowsversion() here but never used it
        return 'Windows %s' % _plat.win32_ver()[0]

    elif sys.platform in ['linux', 'linux2']:
        if os.path.exists('/etc/os-release'):
            with open('/etc/os-release', 'r', encoding='utf-8') as f:
                pairs = _parse_env_var_file(f.read())
            if 'NAME' in pairs and 'VERSION_ID' in pairs:
                return '%s %s' % (pairs['NAME'], pairs['VERSION_ID'])
            elif 'PRETTY_NAME' in pairs:
                return pairs['PRETTY_NAME']
            elif 'NAME' in pairs:
                return pairs['NAME']
            else:
                raise ValueError('No suitable version info found in /etc/os-release')
        elif os.path.exists('/etc/lsb-release'):
            with open('/etc/lsb-release', 'r', encoding='utf-8') as f:
                pairs = _parse_env_var_file(f.read())
            if 'DISTRIB_DESCRIPTION' in pairs:
                return pairs['DISTRIB_DESCRIPTION']
            else:
                raise ValueError('No suitable version info found in /etc/lsb-release')
        else:
            return 'Linux'

    else:
        return '%s %s' % (_plat.system(), _plat.release())
|
||||
|
||||
|
||||
def _list_files(root):
    """
    Lists all of the files in a directory, taking into account any .gitignore
    file that is present

    :param root:
        A unicode filesystem path

    :return:
        A list of unicode strings, containing paths of all files not ignored
        by .gitignore with root, using relative paths
    """

    dir_patterns, file_patterns = _gitignore(root)
    paths = []
    prefix = os.path.abspath(root) + os.sep
    for base, dirs, files in os.walk(root):
        # Prune ignored directories in place so os.walk skips them. Rebuilding
        # via slice assignment fixes the previous dirs.remove()-while-iterating
        # bug, which skipped the sibling entry after each removal.
        dirs[:] = [
            d for d in dirs
            if not any(fnmatch(d, dir_pattern) for dir_pattern in dir_patterns)
        ]
        for f in files:
            if any(fnmatch(f, file_pattern) for file_pattern in file_patterns):
                continue
            full_path = os.path.join(base, f)
            # Report paths relative to root
            if full_path[:len(prefix)] == prefix:
                full_path = full_path[len(prefix):]
            paths.append(full_path)
    return sorted(paths)
|
||||
|
||||
|
||||
def _gitignore(root):
|
||||
"""
|
||||
Parses a .gitignore file and returns patterns to match dirs and files.
|
||||
Only basic gitignore patterns are supported. Pattern negation, ** wildcards
|
||||
and anchored patterns are not currently implemented.
|
||||
|
||||
:param root:
|
||||
A unicode string of the path to the git repository
|
||||
|
||||
:return:
|
||||
A 2-element tuple:
|
||||
- 0: a list of unicode strings to match against dirs
|
||||
- 1: a list of unicode strings to match against dirs and files
|
||||
"""
|
||||
|
||||
gitignore_path = os.path.join(root, '.gitignore')
|
||||
|
||||
dir_patterns = ['.git']
|
||||
file_patterns = []
|
||||
|
||||
if not os.path.exists(gitignore_path):
|
||||
return (dir_patterns, file_patterns)
|
||||
|
||||
with open(gitignore_path, 'r', encoding='utf-8') as f:
|
||||
for line in f.readlines():
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
if line.startswith('#'):
|
||||
continue
|
||||
if '**' in line:
|
||||
raise NotImplementedError('gitignore ** wildcards are not implemented')
|
||||
if line.startswith('!'):
|
||||
raise NotImplementedError('gitignore pattern negation is not implemented')
|
||||
if line.startswith('/'):
|
||||
raise NotImplementedError('gitignore anchored patterns are not implemented')
|
||||
if line.startswith('\\#'):
|
||||
line = '#' + line[2:]
|
||||
if line.startswith('\\!'):
|
||||
line = '!' + line[2:]
|
||||
if line.endswith('/'):
|
||||
dir_patterns.append(line[:-1])
|
||||
else:
|
||||
file_patterns.append(line)
|
||||
|
||||
return (dir_patterns, file_patterns)
|
||||
|
||||
|
||||
def _do_request(method, url, headers, data=None, query_params=None, timeout=20):
    """
    Performs an HTTP request by shelling out to PowerShell (Windows) or curl
    (everything else)

    :param method:
        A unicode string of 'POST' or 'PUT'

    :param url:
        A unicode string of the URL to request

    :param headers:
        A dict of unicode strings, where keys are header names and values are
        the header values.

    :param data:
        A dict of unicode strings (to be encoded as
        application/x-www-form-urlencoded), or a byte string of data.

    :param query_params:
        A dict of unicode keys and values to pass as query params

    :param timeout:
        An integer number of seconds to use as the timeout

    :return:
        A 3-element tuple:
         - 0: A unicode string of the response content-type
         - 1: A unicode string of the response encoding, or None
         - 2: A byte string of the response body
    """

    # NOTE(review): the timeout parameter is never referenced below; the curl
    # branch hard-codes --connect-timeout 5 — confirm whether this is intended

    if query_params:
        # urlencode() uses "+" for spaces, which not all servers accept in a URL
        url += '?' + urlencode(query_params).replace('+', '%20')

    if isinstance(data, dict):
        # Form-encode dict data and flag it via the Content-Type header
        data_bytes = {}
        for key in data:
            data_bytes[key.encode('utf-8')] = data[key].encode('utf-8')
        data = urlencode(data_bytes)
        headers['Content-Type'] = 'application/x-www-form-urlencoded'
    if isinstance(data, str_cls):
        raise TypeError('data must be a byte string')

    try:
        # The request body is staged in a temp file so both backends can
        # upload it from disk
        tempfd, tempf_path = tempfile.mkstemp('-coverage')
        os.write(tempfd, data or b'')
        os.close(tempfd)

        if sys.platform == 'win32':
            powershell_exe = os.path.join('system32\\WindowsPowerShell\\v1.0\\powershell.exe')
            # Force TLS 1.2, then perform the upload via Net.WebClient and
            # echo the response headers followed by the body
            code = "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12;"
            code += "$wc = New-Object Net.WebClient;"
            for key in headers:
                code += "$wc.Headers.add('%s','%s');" % (key, headers[key])
            code += "$out = $wc.UploadFile('%s', '%s', '%s');" % (url, method, tempf_path)
            code += "[System.Text.Encoding]::GetEncoding('ISO-8859-1').GetString($wc.ResponseHeaders.ToByteArray())"

            # To properly obtain bytes, we use BitConverter to get hex dash
            # encoding (e.g. AE-09-3F) and they decode in python
            code += " + [System.BitConverter]::ToString($out);"
            stdout, stderr = _execute(
                [powershell_exe, '-Command', code],
                os.getcwd(),
                re.compile(r'Unable to connect to|TLS|Internal Server Error'),
                6
            )
            if stdout[-2:] == b'\r\n' and b'\r\n\r\n' in stdout:
                # An extra trailing crlf is added at the end by powershell
                stdout = stdout[0:-2]
            parts = stdout.split(b'\r\n\r\n', 1)
            if len(parts) == 2:
                # Reverse the hex-dash encoding of the body added above
                stdout = parts[0] + b'\r\n\r\n' + codecs.decode(parts[1].replace(b'-', b''), 'hex_codec')

        else:
            args = [
                'curl',
                '--http1.1',
                '--connect-timeout', '5',
                '--request',
                method,
                '--location',
                '--silent',
                '--show-error',
                '--include',
                # Prevent curl from asking for an HTTP "100 Continue" response
                '--header', 'Expect:'
            ]
            for key in headers:
                args.append('--header')
                args.append("%s: %s" % (key, headers[key]))
            args.append('--data-binary')
            args.append('@%s' % tempf_path)
            args.append(url)
            # Retry up to 6 times on transient connection/TLS failures
            stdout, stderr = _execute(
                args,
                os.getcwd(),
                re.compile(r'Failed to connect to|TLS|SSLRead|outstanding|cleanly|timed out'),
                6
            )
    finally:
        if tempf_path and os.path.exists(tempf_path):
            os.remove(tempf_path)

    if len(stderr) > 0:
        raise URLError("Error %sing %s:\n%s" % (method, url, stderr))

    # Both backends emit "headers CRLF CRLF body"
    parts = stdout.split(b'\r\n\r\n', 1)
    if len(parts) != 2:
        raise URLError("Error %sing %s, response data malformed:\n%s" % (method, url, stdout))
    header_block, body = parts

    # Scan the raw header block for content-type/content-length
    content_type_header = None
    content_len_header = None
    for hline in header_block.decode('iso-8859-1').splitlines():
        hline_parts = hline.split(':', 1)
        if len(hline_parts) != 2:
            continue
        name, val = hline_parts
        name = name.strip().lower()
        val = val.strip()
        if name == 'content-type':
            content_type_header = val
        if name == 'content-length':
            content_len_header = val

    # A missing content-type is only acceptable for an empty body
    if content_type_header is None and content_len_header != '0':
        raise URLError("Error %sing %s, no content-type header:\n%s" % (method, url, stdout))

    if content_type_header is None:
        content_type = 'text/plain'
        encoding = 'utf-8'
    else:
        content_type, params = cgi.parse_header(content_type_header)
        encoding = params.get('charset')

    return (content_type, encoding, body)
|
||||
|
||||
|
||||
def _execute(params, cwd, retry=None, retries=0, backoff=2):
|
||||
"""
|
||||
Executes a subprocess
|
||||
|
||||
:param params:
|
||||
A list of the executable and arguments to pass to it
|
||||
|
||||
:param cwd:
|
||||
The working directory to execute the command in
|
||||
|
||||
:param retry:
|
||||
If this string is present in stderr, or regex pattern matches stderr, retry the operation
|
||||
|
||||
:param retries:
|
||||
An integer number of times to retry
|
||||
|
||||
:return:
|
||||
A 2-element tuple of (stdout, stderr)
|
||||
"""
|
||||
|
||||
proc = subprocess.Popen(
|
||||
params,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
cwd=cwd
|
||||
)
|
||||
stdout, stderr = proc.communicate()
|
||||
code = proc.wait()
|
||||
if code != 0:
|
||||
if retry and retries > 0:
|
||||
stderr_str = stderr.decode('utf-8')
|
||||
if isinstance(retry, Pattern):
|
||||
if retry.search(stderr_str) is not None:
|
||||
time.sleep(backoff)
|
||||
return _execute(params, cwd, retry, retries - 1, backoff * 2)
|
||||
elif retry in stderr_str:
|
||||
time.sleep(backoff)
|
||||
return _execute(params, cwd, retry, retries - 1, backoff * 2)
|
||||
e = OSError('subprocess exit code for "%s" was %d: %s' % (' '.join(params), code, stderr))
|
||||
e.stdout = stdout
|
||||
e.stderr = stderr
|
||||
raise e
|
||||
return (stdout, stderr)
|
||||
|
||||
|
||||
# Allow invoking this module directly to (re)submit existing coverage data
if __name__ == '__main__':
    _codecov_submit()
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user