Calibre configuration files

This commit is contained in:
RealStickman 2022-08-01 15:04:05 +02:00
parent eed146f01c
commit f8029d15d0
545 changed files with 260130 additions and 0 deletions

View File

@ -0,0 +1,8 @@
json:{
"prefer_author_sort": false,
"toc_title": null,
"mobi_toc_at_start": false,
"dont_compress": false,
"no_inline_toc": false,
"share_not_sync": false
}

View File

@ -0,0 +1,17 @@
json:{
"colors": 0,
"dont_normalize": false,
"keep_aspect_ratio": false,
"right2left": false,
"despeckle": false,
"no_sort": false,
"no_process": false,
"landscape": false,
"dont_sharpen": false,
"disable_trim": false,
"wide": false,
"output_format": "png",
"dont_grayscale": false,
"comic_image_size": null,
"dont_add_comic_pages_to_toc": false
}

View File

@ -0,0 +1,5 @@
json:{
"docx_no_cover": false,
"docx_no_pagebreaks_between_notes": false,
"docx_inline_subsup": false
}

View File

@ -0,0 +1,11 @@
json:{
"docx_page_size": "letter",
"docx_custom_page_size": null,
"docx_no_cover": false,
"docx_no_toc": false,
"docx_page_margin_left": 72.0,
"docx_page_margin_top": 72.0,
"docx_page_margin_right": 72.0,
"docx_page_margin_bottom": 72.0,
"preserve_cover_aspect_ratio": false
}

View File

@ -0,0 +1,12 @@
json:{
"dont_split_on_page_breaks": false,
"flow_size": 0,
"no_default_epub_cover": false,
"no_svg_cover": false,
"epub_inline_toc": false,
"epub_toc_at_end": false,
"toc_title": null,
"preserve_cover_aspect_ratio": false,
"epub_flatten": false,
"epub_version": "3"
}

View File

@ -0,0 +1,3 @@
json:{
"no_inline_fb2_toc": false
}

View File

@ -0,0 +1,4 @@
json:{
"sectionize": "files",
"fb2_genre": "antique"
}

View File

@ -0,0 +1,13 @@
json:{
"enable_heuristics": false,
"markup_chapter_headings": true,
"italicize_common_cases": true,
"fix_indents": true,
"html_unwrap_factor": 0.4,
"unwrap_lines": true,
"delete_blank_paragraphs": true,
"format_scene_breaks": true,
"replace_scene_breaks": "",
"dehyphenate": true,
"renumber_headings": true
}

View File

@ -0,0 +1,5 @@
json:{
"htmlz_css_type": "class",
"htmlz_class_style": "external",
"htmlz_title_filename": false
}

View File

@ -0,0 +1,3 @@
json:{
"allow_conversion_with_errors": true
}

View File

@ -0,0 +1,26 @@
json:{
"change_justification": "original",
"extra_css": null,
"base_font_size": 0.0,
"font_size_mapping": null,
"line_height": 0.0,
"minimum_line_height": 120.0,
"embed_font_family": null,
"embed_all_fonts": false,
"subset_embedded_fonts": false,
"smarten_punctuation": false,
"unsmarten_punctuation": false,
"disable_font_rescaling": false,
"insert_blank_line": false,
"remove_paragraph_spacing": false,
"remove_paragraph_spacing_indent_size": 1.5,
"insert_blank_line_size": 0.5,
"input_encoding": null,
"filter_css": "",
"expand_css": false,
"asciiize": false,
"keep_ligatures": false,
"linearize_tables": false,
"transform_css_rules": "[]",
"transform_html_rules": "[]"
}

View File

@ -0,0 +1,13 @@
json:{
"wordspace": 2.5,
"header": false,
"header_format": "%t by %a",
"minimum_indent": 0.0,
"serif_family": null,
"render_tables_as_images": false,
"sans_family": null,
"mono_family": null,
"text_size_multiplier_for_rendered_tables": 1.0,
"autorotation": false,
"header_separation": 0.0
}

View File

@ -0,0 +1,12 @@
json:{
"prefer_author_sort": false,
"toc_title": null,
"mobi_keep_original_images": false,
"mobi_ignore_margins": false,
"mobi_toc_at_start": false,
"dont_compress": false,
"no_inline_toc": false,
"share_not_sync": false,
"personal_doc": "[PDOC]",
"mobi_file_type": "old"
}

View File

@ -0,0 +1,8 @@
json:{
"margin_top": 5.0,
"margin_left": 5.0,
"margin_right": 5.0,
"margin_bottom": 5.0,
"input_profile": "default",
"output_profile": "tablet"
}

View File

@ -0,0 +1,5 @@
json:{
"format": "doc",
"inline_toc": false,
"pdb_output_encoding": "cp1252"
}

View File

@ -0,0 +1,4 @@
json:{
"no_images": false,
"unwrap_factor": 0.45
}

View File

@ -0,0 +1,26 @@
json:{
"use_profile_size": false,
"paper_size": "letter",
"custom_size": null,
"pdf_hyphenate": false,
"preserve_cover_aspect_ratio": false,
"pdf_serif_family": "Nimbus Roman",
"unit": "inch",
"pdf_sans_family": "Nimbus Sans [UKWN]",
"pdf_mono_family": "Nimbus Mono PS",
"pdf_standard_font": "serif",
"pdf_default_font_size": 20,
"pdf_mono_font_size": 16,
"pdf_page_numbers": false,
"pdf_footer_template": null,
"pdf_header_template": null,
"pdf_add_toc": false,
"toc_title": null,
"pdf_page_margin_left": 72.0,
"pdf_page_margin_top": 72.0,
"pdf_page_margin_right": 72.0,
"pdf_page_margin_bottom": 72.0,
"pdf_use_document_margins": false,
"pdf_page_number_map": null,
"pdf_odd_even_offset": 0.0
}

View File

@ -0,0 +1,5 @@
json:{
"inline_toc": false,
"full_image_depth": false,
"pml_output_encoding": "cp1252"
}

View File

@ -0,0 +1,3 @@
json:{
"inline_toc": false
}

View File

@ -0,0 +1,3 @@
json:{
"ignore_wmf": false
}

View File

@ -0,0 +1,9 @@
json:{
"search_replace": "[]",
"sr1_search": null,
"sr1_replace": null,
"sr2_search": null,
"sr2_replace": null,
"sr3_search": null,
"sr3_replace": null
}

View File

@ -0,0 +1,6 @@
json:{
"snb_insert_empty_line": false,
"snb_dont_indent_first_line": false,
"snb_hide_chapter_name": false,
"snb_full_screen": false
}

View File

@ -0,0 +1,9 @@
json:{
"chapter": "//*[((name()='h1' or name()='h2') and re:test(., '\\s*((chapter|book|section|part)\\s+)|((prolog|prologue|epilogue)(\\s+|$))', 'i')) or @class = 'chapter']",
"chapter_mark": "pagebreak",
"start_reading_at": null,
"remove_first_image": false,
"remove_fake_margins": true,
"insert_metadata": false,
"page_breaks_before": "//*[name()='h1' or name()='h2']"
}

View File

@ -0,0 +1,11 @@
json:{
"level1_toc": null,
"level2_toc": null,
"level3_toc": null,
"toc_threshold": 6,
"max_toc_links": 50,
"no_chapters_in_toc": false,
"use_auto_toc": false,
"toc_filter": null,
"duplicate_links_in_toc": false
}

View File

@ -0,0 +1,7 @@
json:{
"paragraph_type": "auto",
"formatting_type": "auto",
"markdown_extensions": "footnotes, tables, toc",
"preserve_spaces": false,
"txt_in_remove_indents": false
}

View File

@ -0,0 +1,11 @@
json:{
"newline": "system",
"max_line_length": 0,
"force_max_line_length": false,
"inline_toc": false,
"txt_output_formatting": "plain",
"keep_links": false,
"keep_image_references": false,
"keep_color": false,
"txt_output_encoding": "utf-8"
}

View File

@ -0,0 +1,11 @@
json:{
"newline": "system",
"max_line_length": 0,
"force_max_line_length": false,
"inline_toc": false,
"txt_output_formatting": "plain",
"keep_links": false,
"keep_image_references": false,
"keep_color": false,
"txt_output_encoding": "utf-8"
}

View File

@ -0,0 +1,20 @@
{
"disabled_plugins": {
"__class__": "set",
"__value__": []
},
"enabled_plugins": {
"__class__": "set",
"__value__": [
"DeDRM"
]
},
"filetype_mapping": {},
"plugin_customization": {},
"plugins": {
"DeACSM": "/home/marc/.config/calibre/plugins/DeACSM.zip",
"DeDRM": "/home/marc/.config/calibre/plugins/DeDRM.zip",
"KFX Input": "/home/marc/.config/calibre/plugins/KFX Input.zip",
"Obok DeDRM": "/home/marc/.config/calibre/plugins/Obok DeDRM.zip"
}
}

View File

@ -0,0 +1,42 @@
{
"DeDRMexport_Kindle_for_Mac_and_PC_Key_keys": "/home/marc/Downloads/default_key.k4i",
"DeDRMimport_Adobe_Digital_Editions_Key_keys": "/home/marc/Nextcloud/backups",
"Export ADE activation files": "/home/marc/Nextcloud/backups/adobe_account_backup_uuid_2d6cfbec-33fd-43ca-bcf9-e8b281114a17.zip",
"Export ADE keys": "/home/marc/Nextcloud/backups/adobe_uuid_2d6cfbec-33fd-43ca-bcf9-e8b281114a17.der",
"add a plugin dialog": "/home/marc/Downloads",
"add books dialog dir": "/home/marc/Downloads",
"add books dialog dir-last-used-filter-spec-all-files": false,
"database location dialog": "/home/marc/Nextcloud/Books",
"library_delete_books_again": false,
"notified-version-updates": {
"__class__": "set",
"__value__": [
"5.25",
"6.0",
"5.28",
"5.24"
]
},
"recursive book import root dir dialog": "/home/marc/Nextcloud/Books/Unterhaltung",
"save to disk dialog": "/home/marc/Downloads",
"sort_history": [
[
"title",
true
],
[
"series",
true
],
[
"title",
true
],
[
"timestamp",
false
]
],
"welcome_wizard_device": "default",
"welcome_wizard_was_run": true
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,51 @@
{
"add_formats_to_existing": false,
"case_sensitive": false,
"check_for_dupes_on_ctl": false,
"database_path": "/home/marc/library1.db",
"filename_pattern": "(?P<title>.+) - (?P<author>[^_]+)",
"input_format_order": [
"EPUB",
"AZW3",
"MOBI",
"LIT",
"PRC",
"FB2",
"HTML",
"HTM",
"XHTM",
"SHTML",
"XHTML",
"ZIP",
"DOCX",
"ODT",
"RTF",
"PDF",
"TXT"
],
"installation_uuid": "95258752-0a69-416a-90ff-c20df0267b24",
"isbndb_com_key": "",
"language": "de",
"library_path": "/home/marc/Calibre-Bibliothek",
"limit_search_columns": false,
"limit_search_columns_to": [
"title",
"authors",
"tags",
"series",
"publisher"
],
"manage_device_metadata": "manual",
"mark_new_books": false,
"migrated": false,
"network_timeout": 5,
"new_book_tags": [],
"numeric_collation": false,
"output_format": "epub",
"read_file_metadata": true,
"saved_searches": {},
"swap_author_names": false,
"use_primary_find_in_search": true,
"user_categories": {},
"worker_process_priority": "normal"
}

View File

@ -0,0 +1,274 @@
{
"Plugin Updater plugin:plugin updater dialog": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAHDAAABFQAABdUAAAMKAAABxQAAARcAAAXTAAADCAAAAAAAAAAAB4AAAAHFAAABFwAABdMAAAMI"
},
"action-layout-toolbar": [
"Add Books",
"Edit Metadata",
null,
"Convert Books",
"View",
null,
"Store",
"Donate",
"Fetch News",
"Help",
null,
"Remove Books",
"Choose Library",
"Save To Disk",
"Connect Share",
"Tweak ePub",
"Preferences",
"Obok DeDRM"
],
"action-layout-toolbar-device": [
"Add Books",
"Edit Metadata",
null,
"Convert Books",
"View",
"Send To Device",
null,
null,
"Location Manager",
null,
null,
"Fetch News",
"Save To Disk",
"Store",
"Connect Share",
null,
"Remove Books",
null,
"Help",
"Preferences",
"Obok DeDRM"
],
"advanced search dialog current tab": 3,
"advanced_search_template_tab_program_field": "",
"advanced_search_template_tab_test_field": "0",
"advanced_search_template_tab_value_field": "",
"ask_to_manage_device": [
"9d273cd5"
],
"basic_metadata_widget_splitter_state": {
"__class__": "bytearray",
"__value__": "AAAA/wAAAAEAAAADAAAA6wAAAZwAAAEKAf////8BAAAAAQA="
},
"book_details_splitter_horizontal_state": [
true,
200
],
"book_info_dialog_layout": [
{
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAH3AAAAJAAABYgAAAP7AAAB+QAAACYAAAWGAAAD+QAAAAAAAAAAB4AAAAH5AAAAJgAABYYAAAP5"
},
{
"__class__": "bytearray",
"__value__": "AAAA/wAAAAEAAAACAAABXgAAAV4B/////wEAAAABAA=="
}
],
"book_list_pin_splitter_state": {
"__class__": "bytearray",
"__value__": "AAAA/wAAAAEAAAACAAABAAAAAEYA/////wEAAAABAA=="
},
"bulk_metadata_window_geometry": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAAbAAAAEgAAB1gAAAQbAAAAHQAAABQAAAdWAAAEGQAAAAAAAAAAB4AAAAAdAAAAFAAAB1YAAAQZ"
},
"bulk_metadata_window_tab": 0,
"convert_bulk_dialog_geom": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAASAAAAJAAAB08AAAP7AAAAFAAAACYAAAdNAAAD+QAAAAAAAAAAB4AAAAAUAAAAJgAAB00AAAP5"
},
"convert_single_dialog_geom": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAAsAAAAEgAAB2kAAAPpAAAALgAAABQAAAdnAAAD5wAAAAAAAAAAB4AAAAAuAAAAFAAAB2cAAAPn"
},
"cover_browser_splitter_vertical_state": [
false,
300
],
"custom_colors_for_color_dialog": [
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
],
[
255,
255,
255,
255
]
],
"diff_dialog_geom": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAJnAAAAJgAABv8AAAP9AAACaQAAACgAAAb9AAAD+wAAAAAAAAAAB4AAAAJpAAAAKAAABv0AAAP7"
},
"duplicates-question-dialog-geometry": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAACGAAABTAAAA2AAAALPAAAAiAAAAU4AAANeAAACzQAAAAAAAAAAB4AAAACIAAABTgAAA14AAALN"
},
"grid view visible": false,
"jobs view column layout3": {
"__class__": "bytearray",
"__value__": "AAAA/wAAAAAAAAABAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA04AAAAFAQEBAAAAAAAAAAAAAAAAAGT/////AAAAhAAAAAAAAAAFAAABRAAAAAEAAAAAAAAAtgAAAAEAAAAAAAAAjAAAAAEAAAAAAAAAZAAAAAEAAAAAAAAAZAAAAAEAAAAAAAAD6AAAAAAA"
},
"jobs_dialog_geometry": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAGYAAABEQAABQ4AAAMyAAABmgAAARMAAAUMAAADMAAAAAAAAAAAB4AAAAGaAAABEwAABQwAAAMw"
},
"library_usage_stats": {
"/home/marc/Calibre-Bibliothek": 184
},
"metadata-download-identify-widget-splitter-state": {
"__class__": "bytearray",
"__value__": "AAAA/wAAAAEAAAACAAACAAAAAQAA/////wEAAAABAA=="
},
"metadata_single_gui_geom": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAIiAAAA6QAABXcAAANEAAACJAAAAOsAAAV1AAADQgAAAAAAAAAAB4AAAAIkAAAA6wAABXUAAANC"
},
"metasingle_window_geometry3": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAA8AAAAEgAAB3kAAAQbAAAAPgAAABQAAAd3AAAEGQAAAAAAAAAAB4AAAAA+AAAAFAAAB3cAAAQZ"
},
"plugin config dialog:Dateityp:DeACSM": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAMiAAABRAAABKAAAAL9AAADJAAAAUYAAASeAAAC+wAAAAAAAAAAB4AAAAMkAAABRgAABJ4AAAL7"
},
"plugin config dialog:Dateityp:DeDRM": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAP8AAAApAAABRYAAAKaAAAD/gAAAKYAAAUUAAACmAAAAAAAAAAAB4AAAAP+AAAApgAABRQAAAKY"
},
"preferences dialog geometry": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAHtAAAApgAABZIAAAN5AAAB7wAAAKgAAAWQAAADdwAAAAAAAAAAB4AAAAHvAAAAqAAABZAAAAN3"
},
"previous_sort_order_BooksView": {
"authors": true,
"rating": true,
"series": true,
"title": true
},
"quick_start_guide_added": true,
"quickview visible": false,
"recently_used_languages": [
"Englisch"
],
"replace_scene_breaks_history": [
"",
"<hr />",
" ",
"• • •",
"♦ ♦ ♦",
"† †",
"‡ ‡ ‡",
"∞ ∞ ∞",
"¤ ¤ ¤"
],
"search bar visible": true,
"single-cover-fetch-dialog-geometry": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAIhAAAA5wAABXYAAANCAAACIwAAAOkAAAV0AAADQAAAAAAAAAAAB4AAAAIjAAAA6QAABXQAAANA"
},
"tag browser search box visible": false,
"tag_browser_splitter_horizontal_state": [
true,
200
],
"tag_editor_geometry": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAKTAAAAyAAABRYAAANlAAAClQAAAMoAAAUUAAADYwAAAAAAAAAAB4AAAAKVAAAAygAABRQAAANj"
},
"tag_editor_last_filter": "add_tag_input"
}

View File

@ -0,0 +1,78 @@
{
"LRF_conversion_defaults": [],
"LRF_ebook_viewer_options": null,
"asked_library_thing_password": false,
"auto_download_cover": false,
"autolaunch_server": false,
"column_map": [
"title",
"ondevice",
"authors",
"size",
"timestamp",
"rating",
"publisher",
"tags",
"series",
"pubdate"
],
"confirm_delete": false,
"cover_flow_queue_length": 6,
"default_send_to_device_action": "DeviceAction:main::False:False",
"delete_news_from_library_on_upload": false,
"disable_animations": false,
"disable_tray_notification": false,
"enforce_cpu_limit": true,
"get_social_metadata": true,
"gui_layout": "wide",
"highlight_search_matches": false,
"internally_viewed_formats": [
"LRF",
"EPUB",
"LIT",
"MOBI",
"PRC",
"POBI",
"AZW",
"AZW3",
"HTML",
"FB2",
"PDB",
"RB",
"SNB",
"HTMLZ",
"KEPUB"
],
"jobs_search_history": [],
"lrf_viewer_search_history": [],
"main_search_history": [],
"main_window_geometry": {
"__class__": "bytearray",
"__value__": "AdnQywADAAAAAAAAAAAAAAAAB38AAAQjAAAAAgAAAAIAAAd9AAAEIQAAAAAAAAAAB4AAAAACAAAAAgAAB30AAAQh"
},
"match_tags_type": "any",
"new_version_notification": true,
"oldest_news": 60,
"overwrite_author_title_metadata": true,
"plugin_search_history": [],
"save_to_disk_template_history": [],
"scheduler_search_history": [],
"search_as_you_type": false,
"send_to_device_template_history": [],
"send_to_storage_card_by_default": false,
"separate_cover_flow": false,
"shortcuts_search_history": [],
"show_avg_rating": true,
"sort_tags_by": "name",
"systray_icon": false,
"tag_browser_hidden_categories": {
"__class__": "set",
"__value__": []
},
"tweaks_search_history": [],
"upload_news_to_device": true,
"use_roman_numerals_for_series_number": true,
"viewer_search_history": [],
"viewer_toc_search_history": [],
"worker_limit": 6
}

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,11 @@
{
"blacklist": [
"9d273cd5"
],
"history": {
"9d273cd5": [
"ONEPLUS A3003",
"2021-05-15T18:17:07.571225+00:00"
]
}
}

Binary file not shown.

View File

@ -0,0 +1,28 @@
<?xml version="1.0"?>
<activationInfo xmlns="http://ns.adobe.com/adept">
<adept:activationServiceInfo xmlns:adept="http://ns.adobe.com/adept">
<adept:authURL>http://adeactivate.adobe.com/adept</adept:authURL>
<adept:userInfoURL>http://adeactivate.adobe.com/adept</adept:userInfoURL>
<adept:activationURL>http://adeactivate.adobe.com/adept</adept:activationURL>
<adept:certificate>MIIEsjCCA5qgAwIBAgIER2q5eDANBgkqhkiG9w0BAQUFADCBhDELMAkGA1UEBhMCVVMxIzAhBgNVBAoTGkFkb2JlIFN5c3RlbXMgSW5jb3Jwb3JhdGVkMRswGQYDVQQLExJEaWdpdGFsIFB1Ymxpc2hpbmcxMzAxBgNVBAMTKkFkb2JlIENvbnRlbnQgU2VydmVyIENlcnRpZmljYXRlIEF1dGhvcml0eTAeFw0wODAxMDkxODM3NDVaFw0xMzAxMDkxOTA3NDVaMH0xCzAJBgNVBAYTAlVTMSMwIQYDVQQKExpBZG9iZSBTeXN0ZW1zIEluY29ycG9yYXRlZDEbMBkGA1UECxMSRGlnaXRhbCBQdWJsaXNoaW5nMSwwKgYDVQQDEyNodHRwOi8vYWRlYWN0aXZhdGUuYWRvYmUuY29tL2FkZXB0LzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAyXpCCWFh0Q3Bi1S7xf+CJfMd+cZz3HB0NknDScB1Cs8KdU0ygO7iqAgdiAdPliITkUTVEgUPvK+4yYCUderzBjq13/IrKlwEAyWeNgssJekpYgqNywo7Md1OApXzM47wVThNePNydhGYuNEEDDxzO+0JxucfhfArwnp7kIWA6q8CAwEAAaOCAbQwggGwMAsGA1UdDwQEAwIFoDBYBglghkgBhvprHgEESwxJVGhlIHByaXZhdGUga2V5IGNvcnJlc3BvbmRpbmcgdG8gdGhpcyBjZXJ0aWZpY2F0ZSBtYXkgaGF2ZSBiZWVuIGV4cG9ydGVkLjAUBgNVHSUEDTALBgkqhkiG9y8CAQQwgbIGA1UdIASBqjCBpzCBpAYJKoZIhvcvAQIDMIGWMIGTBggrBgEFBQcCAjCBhhqBg1lvdSBhcmUgbm90IHBlcm1pdHRlZCB0byB1c2UgdGhpcyBMaWNlbnNlIENlcnRpZmljYXRlIGV4Y2VwdCBhcyBwZXJtaXR0ZWQgYnkgdGhlIGxpY2Vuc2UgYWdyZWVtZW50IGFjY29tcGFueWluZyB0aGUgQWRvYmUgc29mdHdhcmUuMDEGA1UdHwQqMCgwJqAkoCKGIGh0dHA6Ly9jcmwuYWRvYmUuY29tL2Fkb2JlQ1MuY3JsMB8GA1UdIwQYMBaAFIvu8IFgyaLaHg5SwVgMBLBD94/oMB0GA1UdDgQWBBT9A+kXOPL6N57MN/zovbCGEx2+BTAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBBQUAA4IBAQBVjUalliql3VjpLdT8si7OwPU1wQODllwlgfLH7tI/Ubq5wHDlprGtbf3jZm6tXY1qmh9mz1WnTmQHU3uPk8qgpihrpx4HJTjhAhLP0CXU1rd/t5whwhgT1lYfw77RRG2lZ5BzpHb/XjnY5yc3awd6F3Dli6kTkbcPyOCNoXlW4wiF+jkL+jBImY8xo2EewiJioY/iTYZH5HF+PjHF5mffANiLK/Q43l4f0YF8UagTfAJkD3iQV9lrTOWxKBgpfdyvekGqFCDq9AKzfpllqctxsC29W5bXU0cVYzf6Bj5ALs6tyi7r5fsIPSwszH/i4ixsuD0qccIgTXCwMNbt9zQu</adept:certificate>
<adept:authenticationCertificate>MIIEYDCCA0igAwIBAgIER2q5eTANBgkqhkiG9w0BAQUFADCBhDELMAkGA1UEBhMCVVMxIzAhBgNVBAoTGkFkb2JlIFN5c3RlbXMgSW5jb3Jwb3JhdGVkMRswGQYDVQQLExJEaWdpdGFsIFB1Ymxpc2hpbmcxMzAxBgNVBAMTKkFkb2JlIENvbnRlbnQgU2VydmVyIENlcnRpZmljYXRlIEF1dGhvcml0eTAeFw0wODAxMDkxODQzNDNaFw0xODAxMzEwODAwMDBaMHwxKzApBgNVBAMTImh0dHA6Ly9hZGVhY3RpdmF0ZS5hZG9iZS5jb20vYWRlcHQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxCzAJBgNVBAYTAlVTMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDZAxpzOZ7N38ZGlQjfMY/lfu4Ta4xK3FRm069VwdqGZIwrfTTRxnLE4A9i1X00BnNk/5z7C0pQX435ylIEQPxIFBKTH+ip5rfDNh/Iu6cIlB0N4I/t7Pac8cIDwbc9HxcGTvXg3BFqPjaGVbmVZmoUtSVOsphdA43sZc6j1iFfOQIDAQABo4IBYzCCAV8wEgYDVR0TAQH/BAgwBgEB/wIBATAUBgNVHSUEDTALBgkqhkiG9y8CAQUwgbIGA1UdIASBqjCBpzCBpAYJKoZIhvcvAQIDMIGWMIGTBggrBgEFBQcCAjCBhhqBg1lvdSBhcmUgbm90IHBlcm1pdHRlZCB0byB1c2UgdGhpcyBMaWNlbnNlIENlcnRpZmljYXRlIGV4Y2VwdCBhcyBwZXJtaXR0ZWQgYnkgdGhlIGxpY2Vuc2UgYWdyZWVtZW50IGFjY29tcGFueWluZyB0aGUgQWRvYmUgc29mdHdhcmUuMDEGA1UdHwQqMCgwJqAkoCKGIGh0dHA6Ly9jcmwuYWRvYmUuY29tL2Fkb2JlQ1MuY3JsMAsGA1UdDwQEAwIBBjAfBgNVHSMEGDAWgBSL7vCBYMmi2h4OUsFYDASwQ/eP6DAdBgNVHQ4EFgQU9RP19K+lzF03he+0T47hCVkPhdAwDQYJKoZIhvcNAQEFBQADggEBAJoqOj+bUa+bDYyOSljs6SVzWH2BN2ylIeZKpTQYEo7jA62tRqW/rBZcNIgCudFvEYa7vH8lHhvQak1s95g+NaNidb5tpgbS8Q7/XTyEGS/4Q2HYWHD/8ydKFROGbMhfxpdJgkgn21mb7dbsfq5AZVGS3M4PP1xrMDYm50+Sip9QIm1RJuSaKivDa/piA5p8/cv6w44YBefLzGUN674Y7WS5u656MjdyJsN/7Oup+12fHGiye5QS5mToujGd6LpU80gfhNxhrphASiEBYQ/BUhWjHkSi0j4WOiGvGpT1Xvntcj0rf6XV6lNrOddOYUL+KdC1uDIe8PUI+naKI+nWgrs=</adept:authenticationCertificate>
</adept:activationServiceInfo>
<adept:credentials xmlns:adept="http://ns.adobe.com/adept">
<adept:user>urn:uuid:2d6cfbec-33fd-43ca-bcf9-e8b281114a17</adept:user>
<adept:username method="AdobeID">adobe@frm01.net</adept:username>
<adept:pkcs12>MIIICgIBAzCCB8MGCSqGSIb3DQEHAaCCB7QEggewMIIHrDCCA3AGCSqGSIb3DQEHAaCCA2EEggNdMIIDWTCCA1UGCyqGSIb3DQEMCgECoIICszCCAq8wKQYKKoZIhvcNAQwBAzAbBBQYnc0dHYbt/zeyPvEbYrhgbYyxgwIDAMNQBIICgFqwh/cekpmgYtd57bU3rUEJEohVOC1OrXVh3j9b8UE7RHwiI04O9D0TZtZv0y6IH4VotY/t0j71JAHpXQwVnyQgSB0zrqi7inwJ8p/xFCPvrS/brzZk4hGmSRMfaeyoaqZhTYHThAFw3Hyz7FqmqU5p8bfggSwQ/xOviU7Ct+nBAfkrpaiNfeTQdY63lMKHghYL4IwmZMs6omaVN0ngMuN4/Nhfp7Ij0FK8SmpMMsRJCXSAAk6VjxNBKlLHwgkA4C+qxdyK7LXLd5JO3lGWpR2x+mBHalYt07xaq6OuuNWQKv0Ho1o75Rv3Blibnj8dvcweb3/3aEP0G3p+BfU2bFKrb3pAn3ClsPO1JJnncYdOLmNdkUlmbHK2RIAgAIqE/aJy1QblRcN34drbbV7FMEHScMdaUf6HVTvEj2TkZRT56GxpI4nhIE9KVzzUrj03COLYkRsfkp1NhfBJC1W324Q9Qd4veQWgpAGzYrpN9KwfQsMvNAEijuV0ExYcDxhcp+8cPcCcyjvm8DICAHKCnHtyDHFTutXR6fMQ8Jphu7uz4Sm++YDXby5M+Kb4luK5u8+PDlqfJ5LGAJ54MlzqGUK0OK/NN9U76ga5ekpFeU5wq2DTCQIZ/M4QuHcaTXpme8YempZzcnFWLh5HD/HDQOddCPoxvinGcjiPxC7MWOMKvWPu/oETHNUWYvCz12O8EycWIi4a0dW6Sa0DlC2S0wBBP2lgvd41/M/CksFuqUjiww9CsBMwzbFlmv0ebi9ZKD7IHVz68s+DV3swIGVq7EKSlCGrHNbthnAeOIlk1lwRUTn09a8cRDo8tomY6cBxxdlyqN24mq0vFhwTfqO2CBgxgY4waQYJKoZIhvcNAQkUMVweWgB1AHIAbgA6AHUAdQBpAGQAOgAyAGQANgBjAGYAYgBlAGMALQAzADMAZgBkAC0ANAAzAGMAYQAtAGIAYwBmADkALQBlADgAYgAyADgAMQAxADEANABhADEANzAhBgkqhkiG9w0BCRUxFAQSVGltZSAxNjU4ODU5ODkzNjE4MIIENAYJKoZIhvcNAQcGoIIEJTCCBCECAQAwggQaBgkqhkiG9w0BBwEwKQYKKoZIhvcNAQwBBjAbBBS6J/ZgKvOxIDg+c1iftQoiDAEjXQIDAMNQgIID4Hvb0y7+KcU6CEWDRPRqY6v9CGWmZxT2Ih2P0azuqio0N3tOb+fl4vymhvcJzdGAG1wTX9c2BpZBM7hMVq1YZuM1/rpdXSnbizBYa+ZzcbbYoNU77yQujwQtcJG1JwZI+VCJMJ+rXCKP+0ebIuCmh7AvcfN2tj48h3TFIeRbeic+YjvqB7RifbnaoVkV56L4k2TeKy1yrTqE8K5I7lICEVJA/ouXWOHdpMcse4SGRZ6n3JmZnC6aCoZTwKydQ0HGW7EvkmK+b3DS8gf3SGLvDEz3k3H41oYvr8zAN40ID7a71wdKHIlDcSeLkzZt6YNZBMXeH8ZlNf3s2qiCX79lRHuKlH7GYjNoUoLlAKO1OSwluxr6qKsLizOrmj7pgledFGYW5iCxVUooXneou/fDJtpOZGVKugPmwT5hL4ouiDI7R7gzWaGDjK0EIorthBTOymwj+hMx5wJ+2g42a0UyD0n73G5e1zRE7hT5axOXULsYMiKTJMCEopRVV70fjAsInBmDOCjIzRsPKtVIrVpOVJIr0zULLqyPucnaeJHpr9sb9hojUqRRZy55wnb5L7JbNLWnRTGLApnmhTx5KmbBTo7UdpyhWUqsApPrhRf3pMXPPBzkEZgeSEp2un3lnPMNhKVpo9
lH2Ox206UXlzCYtrK+i+bH1wJdgdoDXzF01ysad8hHAcZJSUNF/i09DaoaFkX3uuPRbiqt5hgYdXb3E3aA5E/ChA80jFjaMLRBbsXSwvn7Ok9LD+kEUOoFvMyz9HrTS3j7fKQRvu20fn3uzyIkUyva46WlNWP/3KGcq1fWarJAuhYgwZbw3o9LB09w87uISN71Wm5pnYpAiFUo48oiTO/PB6F9F8SPiqR8h40Ghjvp7dFm9HzyXjq14T0GBkyG8RaJn9umc4yz1vOt7wPCXyf92cDltlSstpoY+SKrECTMcmnY//b9fyjRiXU4KJ7idSeKmF6thz1SBZ7FzUSoeozPHs+WOmIrVRJDez7J766oJBstqycIq15UVAPGt09ND/WXmhxDyHfZPlrR49YW87CLTFY8MmiOgd/fr+1gTsZqxLLb7L303MKeOQ8Lb0s4bMqGuCLsW/WcniHFhezN7YwyyAsdFhrI/Ugro2uDFlZ3BgAsipaqyzc4kyfGxjkSuIarzxV7huswH8COHSd0Jf9wj8iCLQSw4gX627C9F9dNy0AqT8uwXa/Z78leKUfSbbUB06Veram5y0kXnWGM97WyYKezxpmUHXUagZb5v6zG3o62/ia43tr8YmmUsJ9eyrl3xA+7RkERta/vryBuse5wQhpjd8F/2IxHnSG0MD4wITAJBgUrDgMCGgUABBSD2oMtYGOzFgykxeJda8+qoJQ7BAQUdwkJW7jL0vGmUxZNCAB2k3wM828CAwGGoA==</adept:pkcs12>
<adept:licenseCertificate>MIIDGDCCAoGgAwIBAgIGAYI7wTNyMA0GCSqGSIb3DQEBBQUAMHwxKzApBgNVBAMTImh0dHA6Ly9hZGVhY3RpdmF0ZS5hZG9iZS5jb20vYWRlcHQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxCzAJBgNVBAYTAlVTMB4XDTIyMDcyNjE4MjQ1M1oXDTMyMDcyNjE4MjQ1M1owODE2MDQGA1UEAxMtdXJuOnV1aWQ6MmQ2Y2ZiZWMtMzNmZC00M2NhLWJjZjktZThiMjgxMTE0YTE3MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDf+bPePv4GMbGBRssk/3dXktw0nIou8bhWCXha1i+rRi3VpuhHkj02KgBjOaEWEZy3Xkjkv7JGedv2cT0ZShIC6wOvGTsKT2Y3IJpNgWoZzniaWPz1zAGKffe41vjhrVj+8Vbtmt0MhxUbM3uA47echS7Kg9Cp036ydHYX70EeVQIDAQABo4HoMIHlMIG0BgNVHSMEgawwgamAFPUT9fSvpcxdN4XvtE+O4QlZD4XQoYGKpIGHMIGEMQswCQYDVQQGEwJVUzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzEzMDEGA1UEAxMqQWRvYmUgQ29udGVudCBTZXJ2ZXIgQ2VydGlmaWNhdGUgQXV0aG9yaXR5ggRHarl5MAkGA1UdEwQCMAAwFAYDVR0lBA0wCwYJKoZIhvcvAgEHMAsGA1UdDwQEAwIFIDANBgkqhkiG9w0BAQUFAAOBgQC0lvq2fT3XCu8dB4au2kdQQvMPSVPLet9cS5bF5YSmox4YhLjF8BzBojNinU7fmxAnr5DL4Z4JL2OSf70SL6BOZAZUP8LTf2m3ES5096GGLvGryqNmHIeyublHhAa4sp7ya51NpkAi/Cj765WxORTMY+sF9D92R23Jj+Y8QslG1A==</adept:licenseCertificate>
<adept:privateLicenseKey>MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAN/5s94+/gYxsYFGyyT/d1eS3DScii7xuFYJeFrWL6tGLdWm6EeSPTYqAGM5oRYRnLdeSOS/skZ52/ZxPRlKEgLrA68ZOwpPZjcgmk2BahnOeJpY/PXMAYp997jW+OGtWP7xVu2a3QyHFRsze4Djt5yFLsqD0KnTfrJ0dhfvQR5VAgMBAAECgYBfw07xhnNsSJEBmjg/YG8xZVx7rjay7a0INFJeXFfTXlU4lX2ZJGDBqOGziy9h1TPxfwGhtIjP80hmLXKXPoFGKyTRhf1Z2QsLefX1hhpHhuWI6NxEtQiUiN4oD+igvIWQnPYkRJtth14hvOkl9wtQM6zFG1IV+8hkZf6gJ4c8gQJBAPq3K/UfSjHz1YmIo86wGU8bZHnsdo2uOX0biH3cQ20WsLv2cj6wo/DmFgVAE8hbYkW2yfrfN/ddL1skXTOHnSECQQDksj6mcZyzROW+DGC6csNEMuKVez7/DlWak4M4XwWa8wpQZPAqilNPjmrdK13Bsmxp8TrQDAJt4h/16GrWaEa1AkEAjdgQAJCBU52WVEeAFbG/v+fJgslrkWDemY94O2zgoNlTiCQ4IouhVOt3zeSgzJwXD0YJI+wiJ8sKvc/nAv5YwQJBAJVqp2gTnm+5ueh7Kc9nH5C1Nji3tybo9KDzc64m1wCvfbOc3xTMHzZBNCygIrdknVRyWRyIXCXysTL20KaYpmkCQHNYn681QtlOYC1AyMFcn/w78DmQwTDqlKIyx9oyaRJlEcq6KSeBgu1LJ0pGYq/5EGMYrp0KqMn/qXQ/1OSTY9M=</adept:privateLicenseKey>
<adept:authenticationCertificate>MIIEYDCCA0igAwIBAgIER2q5eTANBgkqhkiG9w0BAQUFADCBhDELMAkGA1UEBhMCVVMxIzAhBgNVBAoTGkFkb2JlIFN5c3RlbXMgSW5jb3Jwb3JhdGVkMRswGQYDVQQLExJEaWdpdGFsIFB1Ymxpc2hpbmcxMzAxBgNVBAMTKkFkb2JlIENvbnRlbnQgU2VydmVyIENlcnRpZmljYXRlIEF1dGhvcml0eTAeFw0wODAxMDkxODQzNDNaFw0xODAxMzEwODAwMDBaMHwxKzApBgNVBAMTImh0dHA6Ly9hZGVhY3RpdmF0ZS5hZG9iZS5jb20vYWRlcHQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxCzAJBgNVBAYTAlVTMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDZAxpzOZ7N38ZGlQjfMY/lfu4Ta4xK3FRm069VwdqGZIwrfTTRxnLE4A9i1X00BnNk/5z7C0pQX435ylIEQPxIFBKTH+ip5rfDNh/Iu6cIlB0N4I/t7Pac8cIDwbc9HxcGTvXg3BFqPjaGVbmVZmoUtSVOsphdA43sZc6j1iFfOQIDAQABo4IBYzCCAV8wEgYDVR0TAQH/BAgwBgEB/wIBATAUBgNVHSUEDTALBgkqhkiG9y8CAQUwgbIGA1UdIASBqjCBpzCBpAYJKoZIhvcvAQIDMIGWMIGTBggrBgEFBQcCAjCBhhqBg1lvdSBhcmUgbm90IHBlcm1pdHRlZCB0byB1c2UgdGhpcyBMaWNlbnNlIENlcnRpZmljYXRlIGV4Y2VwdCBhcyBwZXJtaXR0ZWQgYnkgdGhlIGxpY2Vuc2UgYWdyZWVtZW50IGFjY29tcGFueWluZyB0aGUgQWRvYmUgc29mdHdhcmUuMDEGA1UdHwQqMCgwJqAkoCKGIGh0dHA6Ly9jcmwuYWRvYmUuY29tL2Fkb2JlQ1MuY3JsMAsGA1UdDwQEAwIBBjAfBgNVHSMEGDAWgBSL7vCBYMmi2h4OUsFYDASwQ/eP6DAdBgNVHQ4EFgQU9RP19K+lzF03he+0T47hCVkPhdAwDQYJKoZIhvcNAQEFBQADggEBAJoqOj+bUa+bDYyOSljs6SVzWH2BN2ylIeZKpTQYEo7jA62tRqW/rBZcNIgCudFvEYa7vH8lHhvQak1s95g+NaNidb5tpgbS8Q7/XTyEGS/4Q2HYWHD/8ydKFROGbMhfxpdJgkgn21mb7dbsfq5AZVGS3M4PP1xrMDYm50+Sip9QIm1RJuSaKivDa/piA5p8/cv6w44YBefLzGUN674Y7WS5u656MjdyJsN/7Oup+12fHGiye5QS5mToujGd6LpU80gfhNxhrphASiEBYQ/BUhWjHkSi0j4WOiGvGpT1Xvntcj0rf6XV6lNrOddOYUL+KdC1uDIe8PUI+naKI+nWgrs=</adept:authenticationCertificate>
</adept:credentials>
<activationToken xmlns="http://ns.adobe.com/adept">
<device>urn:uuid:32095968-696a-46d1-95ef-e76097c33051</device>
<fingerprint>Pa7vI/H67wVERB/TsVjesFE6Kws=</fingerprint>
<deviceType>standalone</deviceType>
<activationURL>http://adeactivate.adobe.com/adept</activationURL>
<user>urn:uuid:2d6cfbec-33fd-43ca-bcf9-e8b281114a17</user>
<signature>B+Y6HxQ3o203HDb/5rSnal6Ca9tE8FEmVyiFcVpE9R7QzHo2NbpFzFHssFd2L+C7HKdFQ4pg+SFxyBLrDpLEdzILfPu+gRsDOvk/AGSisXEvdHsTFK9Yc5Cjkz8WkmWM1N6rgJ30V8AW6/d0mHj81g+Iue8VO8soBPkFwXGX1u4=</signature>
</activationToken>
<adept:operatorURLList xmlns:adept="http://ns.adobe.com/adept"><adept:user>urn:uuid:2d6cfbec-33fd-43ca-bcf9-e8b281114a17</adept:user><adept:operatorURL>https://acs4.kobo.com/fulfillment/Fulfill</adept:operatorURL></adept:operatorURLList><adept:licenseServices xmlns:adept="http://ns.adobe.com/adept"><adept:licenseServiceInfo><adept:licenseURL>https://nasigningservice.adobe.com/licensesign</adept:licenseURL><adept:certificate>MIIEvjCCA6agAwIBAgIER2q5ljANBgkqhkiG9w0BAQUFADCBhDELMAkGA1UEBhMCVVMxIzAhBgNVBAoTGkFkb2JlIFN5c3RlbXMgSW5jb3Jwb3JhdGVkMRswGQYDVQQLExJEaWdpdGFsIFB1Ymxpc2hpbmcxMzAxBgNVBAMTKkFkb2JlIENvbnRlbnQgU2VydmVyIENlcnRpZmljYXRlIEF1dGhvcml0eTAeFw0wODA4MTExNjMzNDhaFw0xMzA4MTEwNzAwMDBaMIGIMQswCQYDVQQGEwJVUzEjMCEGA1UEChMaQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0ZWQxGzAZBgNVBAsTEkRpZ2l0YWwgUHVibGlzaGluZzE3MDUGA1UEAxMuaHR0cHM6Ly9uYXNpZ25pbmdzZXJ2aWNlLmFkb2JlLmNvbS9saWNlbnNlc2lnbjCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAs9GRZ1f5UTRySgZ2xAL7TaDKQBfdpIS9ei9Orica0N72BB/WE+82G5lfsZ2HdeCFDZG/oz2WPLXovcuUAbFKSIXVLyc7ONOd4sczeXQYPixeAvqzGtsyMArIzaeJcriGVPRnbD/spbuHR0BHhJEakIiDtQLJz+xgVYHlicx2H/kCAwEAAaOCAbQwggGwMAsGA1UdDwQEAwIFoDBYBglghkgBhvprHgEESwxJVGhlIHByaXZhdGUga2V5IGNvcnJlc3BvbmRpbmcgdG8gdGhpcyBjZXJ0aWZpY2F0ZSBtYXkgaGF2ZSBiZWVuIGV4cG9ydGVkLjAUBgNVHSUEDTALBgkqhkiG9y8CAQIwgbIGA1UdIASBqjCBpzCBpAYJKoZIhvcvAQIDMIGWMIGTBggrBgEFBQcCAjCBhhqBg1lvdSBhcmUgbm90IHBlcm1pdHRlZCB0byB1c2UgdGhpcyBMaWNlbnNlIENlcnRpZmljYXRlIGV4Y2VwdCBhcyBwZXJtaXR0ZWQgYnkgdGhlIGxpY2Vuc2UgYWdyZWVtZW50IGFjY29tcGFueWluZyB0aGUgQWRvYmUgc29mdHdhcmUuMDEGA1UdHwQqMCgwJqAkoCKGIGh0dHA6Ly9jcmwuYWRvYmUuY29tL2Fkb2JlQ1MuY3JsMB8GA1UdIwQYMBaAFIvu8IFgyaLaHg5SwVgMBLBD94/oMB0GA1UdDgQWBBSQ5K+bvggI6Rbh2u9nPhH8bcYTITAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBBQUAA4IBAQC0l1L+BRCccZdb2d9zQBJ7JHkXWt1x/dUydU9I/na+QPFE5x+fGK4cRwaIfp6fNviGyvtJ6Wnxe6du/wlarC1o26UNpyWpnAltcy47LpVXsmcV5rUlhBx10l4lecuX0nx8/xF8joRz2BvvAusK+kxgKeiAjJg2W20wbJKh0Otct1ZihruQsEtGbZJ1L55xfNhrm6CKAHuGuTDYQ/S6W20dUaDUiNFhA2n2eEySLwUwgOuuhfVUPb8amQQKbF4rOQ2rdjAskEl/0CiavW6Xv0LGihThf6CjEbNS
dy+vXQ7K9wFbKsE843DflpuSPfj2Aagtyrv/j1HsBjsf03e0uVu5</adept:certificate></adept:licenseServiceInfo></adept:licenseServices></activationInfo>

View File

@ -0,0 +1,11 @@
<?xml version="1.0"?>
<adept:deviceInfo xmlns:adept="http://ns.adobe.com/adept">
<adept:deviceType>standalone</adept:deviceType>
<adept:deviceClass>Desktop</adept:deviceClass>
<adept:deviceSerial>84abdfab8a0837c803a405f01b2fe493ae7b8c10</adept:deviceSerial>
<adept:deviceName>lupusregina</adept:deviceName>
<adept:version name="hobbes" value="9.3.58046"/>
<adept:version name="clientOS" value="Windows Vista"/>
<adept:version name="clientLocale" value="de"/>
<adept:fingerprint>Pa7vI/H67wVERB/TsVjesFE6Kws=</adept:fingerprint>
</adept:deviceInfo>

View File

@ -0,0 +1 @@
<EFBFBD>6(8[hK4<4B><03><>w,<2C>

View File

@ -0,0 +1,13 @@
.tox/
__pycache__/
build/
dist/
tests/output/
tmp/
*.egg-info/
*.pyc
*.pyo
.python-version
.DS_Store
.coverage
coverage.xml

View File

@ -0,0 +1,19 @@
Copyright (c) 2015-2019 Will Bond <will@wbond.net>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,47 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
from .version import __version__, __version_info__
__all__ = [
'__version__',
'__version_info__',
'load_order',
]
def load_order():
    """
    Return the asn1crypto module and sub-module names in dependency load
    order, for the sake of live reloading code.

    :return:
        A list of unicode strings of module names, as they would appear in
        sys.modules, ordered by which module should be reloaded first
    """
    # Sub-module suffixes, dependencies first; the package itself goes last.
    suffixes = (
        '_errors',
        '_int',
        '_ordereddict',
        '_teletex_codec',
        '_types',
        '_inet',
        '_iri',
        'version',
        'pem',
        'util',
        'parser',
        'core',
        'algos',
        'keys',
        'x509',
        'crl',
        'csr',
        'ocsp',
        'cms',
        'pdf',
        'pkcs12',
        'tsp',
    )
    return ['asn1crypto.' + suffix for suffix in suffixes] + ['asn1crypto']

View File

@ -0,0 +1,54 @@
# coding: utf-8
"""
Exports the following items:
- unwrap()
- APIException()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import re
import textwrap
class APIException(Exception):
    """
    Raised when a consumer calls an API that has been removed from
    asn1crypto.
    """

    pass
def unwrap(string, *params):
    """
    Dedents a multi-line string and joins wrapped lines into single lines.

    A newline is collapsed to a space only when it has text immediately
    before it and the following line starts with plain text - blank lines,
    indented lines, bulleted/ordered list items and = underlines survive.

    :param string:
        The string to format

    :param *params:
        Params to interpolate into the string

    :return:
        The formatted string
    """

    output = textwrap.dedent(string)

    # Only unwrap when the next line is not a list item, indent, blank
    # line or an underline made of = signs
    if '\n' in output:
        output = re.sub('(?<=\\S)\n(?=[^ \n\t\\d\\*\\-=])', ' ', output)

    if params:
        output = output % params

    return output.strip()

View File

@ -0,0 +1,170 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import socket
import struct
from ._errors import unwrap
from ._types import byte_cls, bytes_to_list, str_cls, type_name
def inet_ntop(address_family, packed_ip):
    """
    Windows compatibility shim for socket.inet_ntop().

    :param address_family:
        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6

    :param packed_ip:
        A byte string of the network form of an IP address

    :return:
        A unicode string of the IP address
    """

    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
        raise ValueError(unwrap(
            '''
            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
            not %s
            ''',
            repr(socket.AF_INET),
            repr(socket.AF_INET6),
            repr(address_family)
        ))

    if not isinstance(packed_ip, byte_cls):
        raise TypeError(unwrap(
            '''
            packed_ip must be a byte string, not %s
            ''',
            type_name(packed_ip)
        ))

    # IPv4 addresses are 4 bytes, IPv6 are 16
    required_len = 4 if address_family == socket.AF_INET else 16
    if len(packed_ip) != required_len:
        raise ValueError(unwrap(
            '''
            packed_ip must be %d bytes long - is %d
            ''',
            required_len,
            len(packed_ip)
        ))

    if address_family == socket.AF_INET:
        return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))

    # IPv6: unpack into eight 16-bit groups, then find runs of zero groups
    # so the longest one can be abbreviated with "::"
    octets = struct.unpack(b'!HHHHHHHH', packed_ip)

    # Maps run length -> start index of the FIRST run with that length,
    # so ties abbreviate the earliest run
    runs_of_zero = {}
    longest_run = 0
    zero_index = None
    # The appended -1 sentinel is never zero, so it terminates any run that
    # reaches the end of the address
    for i, octet in enumerate(octets + (-1,)):
        if octet != 0:
            if zero_index is not None:
                length = i - zero_index
                if length not in runs_of_zero:
                    runs_of_zero[length] = zero_index
                longest_run = max(longest_run, length)
                zero_index = None
        elif zero_index is None:
            zero_index = i

    # hex() output without the leading "0x"; groups are not zero-padded
    hexed = [hex(o)[2:] for o in octets]

    # Only runs of two or more zero groups get the "::" abbreviation
    if longest_run < 2:
        return ':'.join(hexed)

    zero_start = runs_of_zero[longest_run]
    zero_end = zero_start + longest_run

    return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:])
def inet_pton(address_family, ip_string):
    """
    Windows compatibility shim for socket.inet_pton().

    :param address_family:
        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6

    :param ip_string:
        A unicode string of an IP address

    :return:
        A byte string of the network form of the IP address
    """

    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
        raise ValueError(unwrap(
            '''
            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
            not %s
            ''',
            repr(socket.AF_INET),
            repr(socket.AF_INET6),
            repr(address_family)
        ))

    if not isinstance(ip_string, str_cls):
        raise TypeError(unwrap(
            '''
            ip_string must be a unicode string, not %s
            ''',
            type_name(ip_string)
        ))

    if address_family == socket.AF_INET:
        octets = ip_string.split('.')
        error = len(octets) != 4
        if not error:
            ints = []
            for o in octets:
                o = int(o)
                if o > 255 or o < 0:
                    error = True
                    break
                ints.append(o)
        if error:
            raise ValueError(unwrap(
                '''
                ip_string must be a dotted string with four integers in the
                range of 0 to 255, got %s
                ''',
                repr(ip_string)
            ))
        return struct.pack(b'!BBBB', *ints)

    # IPv6: validate the "::" abbreviation and expand it to eight groups
    error = False
    omitted = ip_string.count('::')
    if omitted > 1:
        # At most one "::" is allowed in an IPv6 address
        error = True
    elif omitted == 0:
        octets = ip_string.split(':')
        error = len(octets) != 8
    else:
        begin, end = ip_string.split('::')
        begin_octets = begin.split(':')
        end_octets = end.split(':')
        missing = 8 - len(begin_octets) - len(end_octets)
        octets = begin_octets + (['0'] * missing) + end_octets

    if not error:
        ints = []
        for o in octets:
            # NOTE(review): int(o, 16) raises a plain ValueError for
            # non-hex groups instead of the descriptive error below
            o = int(o, 16)
            if o > 65535 or o < 0:
                # NOTE(review): error is set here but the pack below still
                # runs with a short ints list, raising struct.error rather
                # than the ValueError below - confirm against upstream
                error = True
                break
            ints.append(o)

        return struct.pack(b'!HHHHHHHH', *ints)

    raise ValueError(unwrap(
        '''
        ip_string must be a valid ipv6 string, got %s
        ''',
        repr(ip_string)
    ))

View File

@ -0,0 +1,22 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
def fill_width(bytes_, width):
    """
    Left-pads a byte string representing a positive integer with zero bytes
    until it is at least the requested width.

    :param bytes_:
        The integer byte string

    :param width:
        The desired width as an integer

    :return:
        A byte string of the width specified
    """

    shortfall = width - len(bytes_)
    if shortfall > 0:
        bytes_ = (b'\x00' * shortfall) + bytes_
    return bytes_

View File

@ -0,0 +1,291 @@
# coding: utf-8
"""
Functions to convert unicode IRIs into ASCII byte string URIs and back. Exports
the following items:
- iri_to_uri()
- uri_to_iri()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from encodings import idna # noqa
import codecs
import re
import sys
from ._errors import unwrap
from ._types import byte_cls, str_cls, type_name, bytes_to_list, int_types
if sys.version_info < (3,):
from urlparse import urlsplit, urlunsplit
from urllib import (
quote as urlquote,
unquote as unquote_to_bytes,
)
else:
from urllib.parse import (
quote as urlquote,
unquote_to_bytes,
urlsplit,
urlunsplit,
)
def iri_to_uri(value, normalize=False):
    """
    Encodes a unicode IRI into an ASCII byte string URI

    :param value:
        A unicode string of an IRI

    :param normalize:
        A bool that controls URI normalization

    :return:
        A byte string of the ASCII-encoded URI
    """

    if not isinstance(value, str_cls):
        raise TypeError(unwrap(
            '''
            value must be a unicode string, not %s
            ''',
            type_name(value)
        ))

    scheme = None
    # Python 2.6 doesn't split properly if the URL doesn't start with http:// or https://
    if sys.version_info < (2, 7) and not value.startswith('http://') and not value.startswith('https://'):
        # Temporarily rewrite the scheme to http:// so urlsplit parses the
        # netloc/path correctly, then restore the real scheme afterwards
        real_prefix = None
        prefix_match = re.match('^[^:]*://', value)
        if prefix_match:
            real_prefix = prefix_match.group(0)
            value = 'http://' + value[len(real_prefix):]
        parsed = urlsplit(value)
        if real_prefix:
            value = real_prefix + value[7:]
            scheme = _urlquote(real_prefix[:-3])
    else:
        parsed = urlsplit(value)

    if scheme is None:
        scheme = _urlquote(parsed.scheme)

    # Hostnames are converted to punycode via the idna codec
    hostname = parsed.hostname
    if hostname is not None:
        hostname = hostname.encode('idna')

    # RFC 3986 allows userinfo to contain sub-delims
    username = _urlquote(parsed.username, safe='!$&\'()*+,;=')
    password = _urlquote(parsed.password, safe='!$&\'()*+,;=')
    port = parsed.port
    if port is not None:
        port = str_cls(port).encode('ascii')

    # Rebuild the netloc byte-by-byte from its quoted components
    netloc = b''
    if username is not None:
        netloc += username
        if password:
            netloc += b':' + password
        netloc += b'@'
    if hostname is not None:
        netloc += hostname
    if port is not None:
        # When normalizing, drop default ports for http/https
        default_http = scheme == b'http' and port == b'80'
        default_https = scheme == b'https' and port == b'443'
        if not normalize or (not default_http and not default_https):
            netloc += b':' + port

    # RFC 3986 allows a path to contain sub-delims, plus "@" and ":"
    path = _urlquote(parsed.path, safe='/!$&\'()*+,;=@:')
    # RFC 3986 allows the query to contain sub-delims, plus "@", ":" , "/" and "?"
    query = _urlquote(parsed.query, safe='/?!$&\'()*+,;=@:')
    # RFC 3986 allows the fragment to contain sub-delims, plus "@", ":" , "/" and "?"
    fragment = _urlquote(parsed.fragment, safe='/?!$&\'()*+,;=@:')

    if normalize and query is None and fragment is None and path == b'/':
        path = None

    # Python 2.7 compat
    if path is None:
        path = ''

    output = urlunsplit((scheme, netloc, path, query, fragment))
    if isinstance(output, str_cls):
        output = output.encode('latin1')
    return output
def uri_to_iri(value):
    """
    Converts an ASCII URI byte string into a unicode IRI

    :param value:
        An ASCII-encoded byte string of the URI

    :return:
        A unicode string of the IRI
    """

    if not isinstance(value, byte_cls):
        raise TypeError(unwrap(
            '''
            value must be a byte string, not %s
            ''',
            type_name(value)
        ))

    parsed = urlsplit(value)

    scheme = parsed.scheme
    if scheme is not None:
        scheme = scheme.decode('ascii')

    # ":" and "@" are re-quoted in userinfo so the rebuilt netloc is
    # unambiguous
    username = _urlunquote(parsed.username, remap=[':', '@'])
    password = _urlunquote(parsed.password, remap=[':', '@'])
    hostname = parsed.hostname
    if hostname:
        # Punycode hostnames are decoded back to unicode
        hostname = hostname.decode('idna')
    port = parsed.port
    if port and not isinstance(port, int_types):
        port = port.decode('ascii')

    # Rebuild the netloc from the decoded components
    netloc = ''
    if username is not None:
        netloc += username
        if password:
            netloc += ':' + password
        netloc += '@'
    if hostname is not None:
        netloc += hostname
    if port is not None:
        netloc += ':' + str_cls(port)

    # Structural delimiters are preserved if they appear literally
    path = _urlunquote(parsed.path, remap=['/'], preserve=True)
    query = _urlunquote(parsed.query, remap=['&', '='], preserve=True)
    fragment = _urlunquote(parsed.fragment)

    return urlunsplit((scheme, netloc, path, query, fragment))
def _iri_utf8_errors_handler(exc):
    """
    Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte
    sequences encoded in %XX format, but as part of a unicode string.

    :param exc:
        The UnicodeDecodeError exception

    :return:
        A 2-element tuple of (replacement unicode string, integer index to
        resume at)
    """

    offending = bytes_to_list(exc.object[exc.start:exc.end])
    escaped = ''.join(['%%%02x' % num for num in offending])
    return (escaped, exc.end)

codecs.register_error('iriutf8', _iri_utf8_errors_handler)
def _urlquote(string, safe=''):
    """
    Quotes a unicode string for use in a URL

    :param string:
        A unicode string

    :param safe:
        A unicode string of character to not encode

    :return:
        None (if string is None) or an ASCII byte string of the quoted string
    """

    if string is None or string == '':
        return None

    # Anything already hex quoted is pulled out of the URL and unquoted if
    # possible
    escapes = []
    if re.search('%[0-9a-fA-F]{2}', string):
        # Try to unquote any percent values, restoring them if they are not
        # valid UTF-8. Also, requote any safe chars since encoded versions of
        # those are functionally different than the unquoted ones.
        def _try_unescape(match):
            byte_string = unquote_to_bytes(match.group(0))
            # 'iriutf8' leaves undecodable bytes as literal %XX text
            unicode_string = byte_string.decode('utf-8', 'iriutf8')
            for safe_char in list(safe):
                unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char))
            return unicode_string
        string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string)

        # Once we have the minimal set of hex quoted values, removed them from
        # the string so that they are not double quoted
        def _extract_escape(match):
            escapes.append(match.group(0).encode('ascii'))
            # NUL acts as a placeholder; it cannot occur otherwise since
            # urlquote would have encoded it
            return '\x00'
        string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string)

    output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8'))
    if not isinstance(output, byte_cls):
        output = output.encode('ascii')

    # Restore the existing quoted values that we extracted
    if len(escapes) > 0:
        def _return_escape(_):
            # Placeholders are replaced in order of extraction
            return escapes.pop(0)
        output = re.sub(b'%00', _return_escape, output)

    return output
def _urlunquote(byte_string, remap=None, preserve=None):
    """
    Unquotes a URI portion from a byte string into unicode using UTF-8

    :param byte_string:
        A byte string of the data to unquote

    :param remap:
        A list of characters (as unicode) that should be re-mapped to a
        %XX encoding. This is used when characters are not valid in part of a
        URL.

    :param preserve:
        A bool - indicates that the chars to be remapped if they occur in
        non-hex form, should be preserved. E.g. / for URL path.

    :return:
        A unicode string
    """

    if byte_string is None:
        return byte_string

    if byte_string == b'':
        return ''

    if preserve:
        # Swap literal occurrences of each remap char for an unused control
        # character so they survive unquoting, then restore them at the end
        replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F']
        preserve_unmap = {}
        for char in remap:
            replacement = replacements.pop(0)
            preserve_unmap[replacement] = char
            byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii'))

    byte_string = unquote_to_bytes(byte_string)

    if remap:
        # Any remap chars produced by unquoting get re-encoded as %XX
        for char in remap:
            byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii'))

    # 'iriutf8' leaves undecodable bytes as literal %XX text
    output = byte_string.decode('utf-8', 'iriutf8')

    if preserve:
        for replacement, original in preserve_unmap.items():
            output = output.replace(replacement, original)

    return output

View File

@ -0,0 +1,135 @@
# Copyright (c) 2009 Raymond Hettinger
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import sys

# On Python 2.7+ the stdlib OrderedDict is used; the class below is a
# backport (Raymond Hettinger's recipe) for Python 2.6 only
if not sys.version_info < (2, 7):
    from collections import OrderedDict
else:
    from UserDict import DictMixin

    class OrderedDict(dict, DictMixin):
        """
        Dict subclass that remembers insertion order, implemented with a
        circular doubly linked list of [key, prev, next] nodes.
        """

        def __init__(self, *args, **kwds):
            if len(args) > 1:
                raise TypeError('expected at most 1 arguments, got %d' % len(args))
            try:
                # Probe for the sentinel; only initialize once
                self.__end
            except AttributeError:
                self.clear()
            self.update(*args, **kwds)

        def clear(self):
            self.__end = end = []
            end += [None, end, end]  # sentinel node for doubly linked list
            self.__map = {}  # key --> [key, prev, next]
            dict.clear(self)

        def __setitem__(self, key, value):
            if key not in self:
                # Append a new node just before the sentinel (i.e. at the end)
                end = self.__end
                curr = end[1]
                curr[2] = end[1] = self.__map[key] = [key, curr, end]
            dict.__setitem__(self, key, value)

        def __delitem__(self, key):
            dict.__delitem__(self, key)
            # Unlink the node from the list
            key, prev, next_ = self.__map.pop(key)
            prev[2] = next_
            next_[1] = prev

        def __iter__(self):
            # Walk forward from the sentinel
            end = self.__end
            curr = end[2]
            while curr is not end:
                yield curr[0]
                curr = curr[2]

        def __reversed__(self):
            # Walk backward from the sentinel
            end = self.__end
            curr = end[1]
            while curr is not end:
                yield curr[0]
                curr = curr[1]

        def popitem(self, last=True):
            if not self:
                raise KeyError('dictionary is empty')
            # .next() is the Py2 iterator protocol method
            if last:
                key = reversed(self).next()
            else:
                key = iter(self).next()
            value = self.pop(key)
            return key, value

        def __reduce__(self):
            # Drop the linked-list internals from the pickled state; they are
            # rebuilt by __init__ from the items list
            items = [[k, self[k]] for k in self]
            tmp = self.__map, self.__end
            del self.__map, self.__end
            inst_dict = vars(self).copy()
            self.__map, self.__end = tmp
            if inst_dict:
                return (self.__class__, (items,), inst_dict)
            return self.__class__, (items,)

        def keys(self):
            return list(self)

        # DictMixin derives these from the methods defined above
        setdefault = DictMixin.setdefault
        update = DictMixin.update
        pop = DictMixin.pop
        values = DictMixin.values
        items = DictMixin.items
        iterkeys = DictMixin.iterkeys
        itervalues = DictMixin.itervalues
        iteritems = DictMixin.iteritems

        def __repr__(self):
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())

        def copy(self):
            return self.__class__(self)

        @classmethod
        def fromkeys(cls, iterable, value=None):
            d = cls()
            for key in iterable:
                d[key] = value
            return d

        def __eq__(self, other):
            # Comparison to another OrderedDict is order-sensitive; to a
            # plain dict it is order-insensitive
            if isinstance(other, OrderedDict):
                if len(self) != len(other):
                    return False
                for p, q in zip(self.items(), other.items()):
                    if p != q:
                        return False
                return True
            return dict.__eq__(self, other)

        def __ne__(self, other):
            return not self == other

View File

@ -0,0 +1,331 @@
# coding: utf-8
"""
Implementation of the teletex T.61 codec. Exports the following items:
- register()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import codecs
class TeletexCodec(codecs.Codec):
    """
    Stateless teletex (T.61) codec backed by the module-level charmap tables.
    """

    def encode(self, input_, errors='strict'):
        return codecs.charmap_encode(input_, errors, ENCODING_TABLE)

    def decode(self, input_, errors='strict'):
        return codecs.charmap_decode(input_, errors, DECODING_TABLE)
class TeletexIncrementalEncoder(codecs.IncrementalEncoder):
    """
    Incremental encoder for the teletex codec; each chunk is encoded
    independently since the charmap needs no state across calls.
    """

    def encode(self, input_, final=False):
        return codecs.charmap_encode(input_, self.errors, ENCODING_TABLE)[0]
class TeletexIncrementalDecoder(codecs.IncrementalDecoder):
    """
    Incremental decoder for the teletex codec; each chunk is decoded
    independently since the charmap needs no state across calls.
    """

    def decode(self, input_, final=False):
        return codecs.charmap_decode(input_, self.errors, DECODING_TABLE)[0]
class TeletexStreamWriter(TeletexCodec, codecs.StreamWriter):
    # Combines the codec's encode() with the StreamWriter machinery
    pass
class TeletexStreamReader(TeletexCodec, codecs.StreamReader):
    # Combines the codec's decode() with the StreamReader machinery
    pass
def teletex_search_function(name):
    """
    Search function for teletex codec that is passed to codecs.register();
    answers only for the codec name 'teletex'.
    """

    if name == 'teletex':
        codec = TeletexCodec()
        return codecs.CodecInfo(
            name='teletex',
            encode=codec.encode,
            decode=codec.decode,
            incrementalencoder=TeletexIncrementalEncoder,
            incrementaldecoder=TeletexIncrementalDecoder,
            streamreader=TeletexStreamReader,
            streamwriter=TeletexStreamWriter,
        )
    return None
def register():
    """
    Registers the teletex codec search function with Python's codec registry
    """

    codecs.register(teletex_search_function)
# http://en.wikipedia.org/wiki/ITU_T.61
DECODING_TABLE = (
'\u0000'
'\u0001'
'\u0002'
'\u0003'
'\u0004'
'\u0005'
'\u0006'
'\u0007'
'\u0008'
'\u0009'
'\u000A'
'\u000B'
'\u000C'
'\u000D'
'\u000E'
'\u000F'
'\u0010'
'\u0011'
'\u0012'
'\u0013'
'\u0014'
'\u0015'
'\u0016'
'\u0017'
'\u0018'
'\u0019'
'\u001A'
'\u001B'
'\u001C'
'\u001D'
'\u001E'
'\u001F'
'\u0020'
'\u0021'
'\u0022'
'\ufffe'
'\ufffe'
'\u0025'
'\u0026'
'\u0027'
'\u0028'
'\u0029'
'\u002A'
'\u002B'
'\u002C'
'\u002D'
'\u002E'
'\u002F'
'\u0030'
'\u0031'
'\u0032'
'\u0033'
'\u0034'
'\u0035'
'\u0036'
'\u0037'
'\u0038'
'\u0039'
'\u003A'
'\u003B'
'\u003C'
'\u003D'
'\u003E'
'\u003F'
'\u0040'
'\u0041'
'\u0042'
'\u0043'
'\u0044'
'\u0045'
'\u0046'
'\u0047'
'\u0048'
'\u0049'
'\u004A'
'\u004B'
'\u004C'
'\u004D'
'\u004E'
'\u004F'
'\u0050'
'\u0051'
'\u0052'
'\u0053'
'\u0054'
'\u0055'
'\u0056'
'\u0057'
'\u0058'
'\u0059'
'\u005A'
'\u005B'
'\ufffe'
'\u005D'
'\ufffe'
'\u005F'
'\ufffe'
'\u0061'
'\u0062'
'\u0063'
'\u0064'
'\u0065'
'\u0066'
'\u0067'
'\u0068'
'\u0069'
'\u006A'
'\u006B'
'\u006C'
'\u006D'
'\u006E'
'\u006F'
'\u0070'
'\u0071'
'\u0072'
'\u0073'
'\u0074'
'\u0075'
'\u0076'
'\u0077'
'\u0078'
'\u0079'
'\u007A'
'\ufffe'
'\u007C'
'\ufffe'
'\ufffe'
'\u007F'
'\u0080'
'\u0081'
'\u0082'
'\u0083'
'\u0084'
'\u0085'
'\u0086'
'\u0087'
'\u0088'
'\u0089'
'\u008A'
'\u008B'
'\u008C'
'\u008D'
'\u008E'
'\u008F'
'\u0090'
'\u0091'
'\u0092'
'\u0093'
'\u0094'
'\u0095'
'\u0096'
'\u0097'
'\u0098'
'\u0099'
'\u009A'
'\u009B'
'\u009C'
'\u009D'
'\u009E'
'\u009F'
'\u00A0'
'\u00A1'
'\u00A2'
'\u00A3'
'\u0024'
'\u00A5'
'\u0023'
'\u00A7'
'\u00A4'
'\ufffe'
'\ufffe'
'\u00AB'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\u00B0'
'\u00B1'
'\u00B2'
'\u00B3'
'\u00D7'
'\u00B5'
'\u00B6'
'\u00B7'
'\u00F7'
'\ufffe'
'\ufffe'
'\u00BB'
'\u00BC'
'\u00BD'
'\u00BE'
'\u00BF'
'\ufffe'
'\u0300'
'\u0301'
'\u0302'
'\u0303'
'\u0304'
'\u0306'
'\u0307'
'\u0308'
'\ufffe'
'\u030A'
'\u0327'
'\u0332'
'\u030B'
'\u0328'
'\u030C'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\u2126'
'\u00C6'
'\u00D0'
'\u00AA'
'\u0126'
'\ufffe'
'\u0132'
'\u013F'
'\u0141'
'\u00D8'
'\u0152'
'\u00BA'
'\u00DE'
'\u0166'
'\u014A'
'\u0149'
'\u0138'
'\u00E6'
'\u0111'
'\u00F0'
'\u0127'
'\u0131'
'\u0133'
'\u0140'
'\u0142'
'\u00F8'
'\u0153'
'\u00DF'
'\u00FE'
'\u0167'
'\u014B'
'\ufffe'
)
ENCODING_TABLE = codecs.charmap_build(DECODING_TABLE)

View File

@ -0,0 +1,46 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import inspect
import sys
# Python 2/3 shims: normalized names for the text type, the bytes type,
# integer types and byte-iteration helpers
if sys.version_info < (3,):
    str_cls = unicode  # noqa
    byte_cls = str
    int_types = (int, long)  # noqa

    def bytes_to_list(byte_string):
        # Py2 byte strings iterate as 1-char strings, so convert via ord()
        return [ord(b) for b in byte_string]

    chr_cls = chr

else:
    str_cls = str
    byte_cls = bytes
    int_types = int
    # Py3 bytes already iterate as ints
    bytes_to_list = list

    def chr_cls(num):
        return bytes([num])
def type_name(value):
    """
    Returns a user-readable name for the type of an object

    :param value:
        A value to get the type name of

    :return:
        A unicode string of the object's type name
    """

    cls = value if inspect.isclass(value) else value.__class__
    module = cls.__module__
    # Builtins are shown bare; everything else is module-qualified
    if module in ('builtins', '__builtin__'):
        return cls.__name__
    return '%s.%s' % (module, cls.__name__)

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,984 @@
# coding: utf-8
"""
ASN.1 type classes for cryptographic message syntax (CMS). Structures are also
compatible with PKCS#7. Exports the following items:
- AuthenticatedData()
- AuthEnvelopedData()
- CompressedData()
- ContentInfo()
- DigestedData()
- EncryptedData()
- EnvelopedData()
- SignedAndEnvelopedData()
- SignedData()
Other type classes are defined that help compose the types listed above.
Most CMS structures in the wild are formatted as ContentInfo encapsulating one of the other types.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
try:
import zlib
except (ImportError):
zlib = None
from .algos import (
_ForceNullParameters,
DigestAlgorithm,
EncryptionAlgorithm,
HmacAlgorithm,
KdfAlgorithm,
RSAESOAEPParams,
SignedDigestAlgorithm,
)
from .core import (
Any,
BitString,
Choice,
Enumerated,
GeneralizedTime,
Integer,
ObjectIdentifier,
OctetBitString,
OctetString,
ParsableOctetString,
Sequence,
SequenceOf,
SetOf,
UTCTime,
UTF8String,
)
from .crl import CertificateList
from .keys import PublicKeyInfo
from .ocsp import OCSPResponse
from .x509 import Attributes, Certificate, Extensions, GeneralName, GeneralNames, Name
# These structures are taken from
# ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-6.asc
class ExtendedCertificateInfo(Sequence):
_fields = [
('version', Integer),
('certificate', Certificate),
('attributes', Attributes),
]
class ExtendedCertificate(Sequence):
_fields = [
('extended_certificate_info', ExtendedCertificateInfo),
('signature_algorithm', SignedDigestAlgorithm),
('signature', OctetBitString),
]
# These structures are taken from https://tools.ietf.org/html/rfc5652,
# https://tools.ietf.org/html/rfc5083, http://tools.ietf.org/html/rfc2315,
# https://tools.ietf.org/html/rfc5940, https://tools.ietf.org/html/rfc3274,
# https://tools.ietf.org/html/rfc3281
class CMSVersion(Integer):
_map = {
0: 'v0',
1: 'v1',
2: 'v2',
3: 'v3',
4: 'v4',
5: 'v5',
}
class CMSAttributeType(ObjectIdentifier):
_map = {
'1.2.840.113549.1.9.3': 'content_type',
'1.2.840.113549.1.9.4': 'message_digest',
'1.2.840.113549.1.9.5': 'signing_time',
'1.2.840.113549.1.9.6': 'counter_signature',
# https://tools.ietf.org/html/rfc2633#page-26
'1.2.840.113549.1.9.16.2.11': 'encrypt_key_pref',
# https://tools.ietf.org/html/rfc3161#page-20
'1.2.840.113549.1.9.16.2.14': 'signature_time_stamp_token',
# https://tools.ietf.org/html/rfc6211#page-5
'1.2.840.113549.1.9.52': 'cms_algorithm_protection',
# https://docs.microsoft.com/en-us/previous-versions/hh968145(v%3Dvs.85)
'1.3.6.1.4.1.311.2.4.1': 'microsoft_nested_signature',
# Some places refer to this as SPC_RFC3161_OBJID, others szOID_RFC3161_counterSign.
# https://docs.microsoft.com/en-us/windows/win32/api/wincrypt/ns-wincrypt-crypt_algorithm_identifier
# refers to szOID_RFC3161_counterSign as "1.2.840.113549.1.9.16.1.4",
# but that OID is also called szOID_TIMESTAMP_TOKEN. Because of there being
# no canonical source for this OID, we give it our own name
'1.3.6.1.4.1.311.3.3.1': 'microsoft_time_stamp_token',
}
class Time(Choice):
_alternatives = [
('utc_time', UTCTime),
('generalized_time', GeneralizedTime),
]
class ContentType(ObjectIdentifier):
_map = {
'1.2.840.113549.1.7.1': 'data',
'1.2.840.113549.1.7.2': 'signed_data',
'1.2.840.113549.1.7.3': 'enveloped_data',
'1.2.840.113549.1.7.4': 'signed_and_enveloped_data',
'1.2.840.113549.1.7.5': 'digested_data',
'1.2.840.113549.1.7.6': 'encrypted_data',
'1.2.840.113549.1.9.16.1.2': 'authenticated_data',
'1.2.840.113549.1.9.16.1.9': 'compressed_data',
'1.2.840.113549.1.9.16.1.23': 'authenticated_enveloped_data',
}
class CMSAlgorithmProtection(Sequence):
_fields = [
('digest_algorithm', DigestAlgorithm),
('signature_algorithm', SignedDigestAlgorithm, {'implicit': 1, 'optional': True}),
('mac_algorithm', HmacAlgorithm, {'implicit': 2, 'optional': True}),
]
class SetOfContentType(SetOf):
_child_spec = ContentType
class SetOfOctetString(SetOf):
_child_spec = OctetString
class SetOfTime(SetOf):
_child_spec = Time
class SetOfAny(SetOf):
_child_spec = Any
class SetOfCMSAlgorithmProtection(SetOf):
_child_spec = CMSAlgorithmProtection
class CMSAttribute(Sequence):
    """
    A single CMS attribute: a type OID plus a set of values whose ASN.1
    spec depends on that OID.
    """

    _fields = [
        ('type', CMSAttributeType),
        # Spec is resolved at parse time via _spec_callbacks below
        ('values', None),
    ]

    # Maps an attribute type name to the SetOf spec for its values; empty
    # here — presumably populated elsewhere in the package (TODO confirm)
    _oid_specs = {}

    def _values_spec(self):
        # Unrecognized attribute types fall back to SetOfAny
        return self._oid_specs.get(self['type'].native, SetOfAny)

    _spec_callbacks = {
        'values': _values_spec
    }
class CMSAttributes(SetOf):
_child_spec = CMSAttribute
class IssuerSerial(Sequence):
_fields = [
('issuer', GeneralNames),
('serial', Integer),
('issuer_uid', OctetBitString, {'optional': True}),
]
class AttCertVersion(Integer):
_map = {
0: 'v1',
1: 'v2',
}
class AttCertSubject(Choice):
_alternatives = [
('base_certificate_id', IssuerSerial, {'explicit': 0}),
('subject_name', GeneralNames, {'explicit': 1}),
]
class AttCertValidityPeriod(Sequence):
_fields = [
('not_before_time', GeneralizedTime),
('not_after_time', GeneralizedTime),
]
class AttributeCertificateInfoV1(Sequence):
_fields = [
('version', AttCertVersion, {'default': 'v1'}),
('subject', AttCertSubject),
('issuer', GeneralNames),
('signature', SignedDigestAlgorithm),
('serial_number', Integer),
('att_cert_validity_period', AttCertValidityPeriod),
('attributes', Attributes),
('issuer_unique_id', OctetBitString, {'optional': True}),
('extensions', Extensions, {'optional': True}),
]
class AttributeCertificateV1(Sequence):
_fields = [
('ac_info', AttributeCertificateInfoV1),
('signature_algorithm', SignedDigestAlgorithm),
('signature', OctetBitString),
]
class DigestedObjectType(Enumerated):
_map = {
0: 'public_key',
1: 'public_key_cert',
2: 'other_objy_types',
}
class ObjectDigestInfo(Sequence):
_fields = [
('digested_object_type', DigestedObjectType),
('other_object_type_id', ObjectIdentifier, {'optional': True}),
('digest_algorithm', DigestAlgorithm),
('object_digest', OctetBitString),
]
class Holder(Sequence):
_fields = [
('base_certificate_id', IssuerSerial, {'implicit': 0, 'optional': True}),
('entity_name', GeneralNames, {'implicit': 1, 'optional': True}),
('object_digest_info', ObjectDigestInfo, {'implicit': 2, 'optional': True}),
]
class V2Form(Sequence):
_fields = [
('issuer_name', GeneralNames, {'optional': True}),
('base_certificate_id', IssuerSerial, {'explicit': 0, 'optional': True}),
('object_digest_info', ObjectDigestInfo, {'explicit': 1, 'optional': True}),
]
class AttCertIssuer(Choice):
_alternatives = [
('v1_form', GeneralNames),
('v2_form', V2Form, {'explicit': 0}),
]
class IetfAttrValue(Choice):
_alternatives = [
('octets', OctetString),
('oid', ObjectIdentifier),
('string', UTF8String),
]
class IetfAttrValues(SequenceOf):
_child_spec = IetfAttrValue
class IetfAttrSyntax(Sequence):
_fields = [
('policy_authority', GeneralNames, {'implicit': 0, 'optional': True}),
('values', IetfAttrValues),
]
class SetOfIetfAttrSyntax(SetOf):
_child_spec = IetfAttrSyntax
class SvceAuthInfo(Sequence):
_fields = [
('service', GeneralName),
('ident', GeneralName),
('auth_info', OctetString, {'optional': True}),
]
class SetOfSvceAuthInfo(SetOf):
_child_spec = SvceAuthInfo
class RoleSyntax(Sequence):
_fields = [
('role_authority', GeneralNames, {'implicit': 0, 'optional': True}),
('role_name', GeneralName, {'implicit': 1}),
]
class SetOfRoleSyntax(SetOf):
_child_spec = RoleSyntax
class ClassList(BitString):
_map = {
0: 'unmarked',
1: 'unclassified',
2: 'restricted',
3: 'confidential',
4: 'secret',
5: 'top_secret',
}
class SecurityCategory(Sequence):
_fields = [
('type', ObjectIdentifier, {'implicit': 0}),
('value', Any, {'implicit': 1}),
]
class SetOfSecurityCategory(SetOf):
_child_spec = SecurityCategory
class Clearance(Sequence):
_fields = [
('policy_id', ObjectIdentifier, {'implicit': 0}),
('class_list', ClassList, {'implicit': 1, 'default': 'unclassified'}),
('security_categories', SetOfSecurityCategory, {'implicit': 2, 'optional': True}),
]
class SetOfClearance(SetOf):
_child_spec = Clearance
class BigTime(Sequence):
_fields = [
('major', Integer),
('fractional_seconds', Integer),
('sign', Integer, {'optional': True}),
]
class LeapData(Sequence):
_fields = [
('leap_time', BigTime),
('action', Integer),
]
class SetOfLeapData(SetOf):
_child_spec = LeapData
class TimingMetrics(Sequence):
_fields = [
('ntp_time', BigTime),
('offset', BigTime),
('delay', BigTime),
('expiration', BigTime),
('leap_event', SetOfLeapData, {'optional': True}),
]
class SetOfTimingMetrics(SetOf):
_child_spec = TimingMetrics
class TimingPolicy(Sequence):
_fields = [
('policy_id', SequenceOf, {'spec': ObjectIdentifier}),
('max_offset', BigTime, {'explicit': 0, 'optional': True}),
('max_delay', BigTime, {'explicit': 1, 'optional': True}),
]
class SetOfTimingPolicy(SetOf):
_child_spec = TimingPolicy
class AttCertAttributeType(ObjectIdentifier):
_map = {
'1.3.6.1.5.5.7.10.1': 'authentication_info',
'1.3.6.1.5.5.7.10.2': 'access_identity',
'1.3.6.1.5.5.7.10.3': 'charging_identity',
'1.3.6.1.5.5.7.10.4': 'group',
'2.5.4.72': 'role',
'2.5.4.55': 'clearance',
'1.3.6.1.4.1.601.10.4.1': 'timing_metrics',
'1.3.6.1.4.1.601.10.4.2': 'timing_policy',
}
class AttCertAttribute(Sequence):
_fields = [
('type', AttCertAttributeType),
('values', None),
]
_oid_specs = {
'authentication_info': SetOfSvceAuthInfo,
'access_identity': SetOfSvceAuthInfo,
'charging_identity': SetOfIetfAttrSyntax,
'group': SetOfIetfAttrSyntax,
'role': SetOfRoleSyntax,
'clearance': SetOfClearance,
'timing_metrics': SetOfTimingMetrics,
'timing_policy': SetOfTimingPolicy,
}
def _values_spec(self):
return self._oid_specs.get(self['type'].native, SetOfAny)
_spec_callbacks = {
'values': _values_spec
}
class AttCertAttributes(SequenceOf):
_child_spec = AttCertAttribute
class AttributeCertificateInfoV2(Sequence):
_fields = [
('version', AttCertVersion),
('holder', Holder),
('issuer', AttCertIssuer),
('signature', SignedDigestAlgorithm),
('serial_number', Integer),
('att_cert_validity_period', AttCertValidityPeriod),
('attributes', AttCertAttributes),
('issuer_unique_id', OctetBitString, {'optional': True}),
('extensions', Extensions, {'optional': True}),
]
class AttributeCertificateV2(Sequence):
# Handle the situation where a V2 cert is encoded as V1
_bad_tag = 1
_fields = [
('ac_info', AttributeCertificateInfoV2),
('signature_algorithm', SignedDigestAlgorithm),
('signature', OctetBitString),
]
class OtherCertificateFormat(Sequence):
_fields = [
('other_cert_format', ObjectIdentifier),
('other_cert', Any),
]
class CertificateChoices(Choice):
    _alternatives = [
        ('certificate', Certificate),
        ('extended_certificate', ExtendedCertificate, {'implicit': 0}),
        ('v1_attr_cert', AttributeCertificateV1, {'implicit': 1}),
        ('v2_attr_cert', AttributeCertificateV2, {'implicit': 2}),
        ('other', OtherCertificateFormat, {'implicit': 3}),
    ]

    def validate(self, class_, tag, contents):
        """
        Ensures that the class and tag specified exist as an alternative. This
        custom version fixes parsing broken encodings where a V2 attribute
        certificate is encoded as a V1

        :param class_:
            The integer class_ from the encoded value header

        :param tag:
            The integer tag from the encoded value header

        :param contents:
            A byte string of the contents of the value - used when the object
            is explicitly tagged

        :raises:
            ValueError - when value is not a valid alternative
        """

        super(CertificateChoices, self).validate(class_, tag, contents)
        # Choice index 2 is 'v1_attr_cert'; if the inner version field of the
        # encoded cert actually says v2, re-point at 'v2_attr_cert' (index 3)
        if self._choice == 2:
            if AttCertVersion.load(Sequence.load(contents)[0].dump()).native == 'v2':
                self._choice = 3
class CertificateSet(SetOf):
    _child_spec = CertificateChoices


class ContentInfo(Sequence):
    # Generic CMS content wrapper: an OID naming the content type plus the
    # (optional, explicitly tagged) content itself
    _fields = [
        ('content_type', ContentType),
        ('content', Any, {'explicit': 0, 'optional': True}),
    ]

    _oid_pair = ('content_type', 'content')
    # Populated at the bottom of the module to avoid forward references
    _oid_specs = {}


class SetOfContentInfo(SetOf):
    _child_spec = ContentInfo


class EncapsulatedContentInfo(Sequence):
    # Same shape as ContentInfo, but CMS wraps the content in an Octet String
    _fields = [
        ('content_type', ContentType),
        ('content', ParsableOctetString, {'explicit': 0, 'optional': True}),
    ]

    _oid_pair = ('content_type', 'content')
    # Populated at the bottom of the module to avoid forward references
    _oid_specs = {}


class IssuerAndSerialNumber(Sequence):
    _fields = [
        ('issuer', Name),
        ('serial_number', Integer),
    ]


class SignerIdentifier(Choice):
    _alternatives = [
        ('issuer_and_serial_number', IssuerAndSerialNumber),
        ('subject_key_identifier', OctetString, {'implicit': 0}),
    ]


class DigestAlgorithms(SetOf):
    _child_spec = DigestAlgorithm


class CertificateRevocationLists(SetOf):
    _child_spec = CertificateList
class SCVPReqRes(Sequence):
    _fields = [
        ('request', ContentInfo, {'explicit': 0, 'optional': True}),
        ('response', ContentInfo),
    ]


class OtherRevInfoFormatId(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.16.2': 'ocsp_response',
        '1.3.6.1.5.5.7.16.4': 'scvp',
    }


class OtherRevocationInfoFormat(Sequence):
    # Revocation information that is not a CRL - the format OID selects the
    # parser for the other_rev_info payload via _oid_specs
    _fields = [
        ('other_rev_info_format', OtherRevInfoFormatId),
        ('other_rev_info', Any),
    ]

    _oid_pair = ('other_rev_info_format', 'other_rev_info')
    _oid_specs = {
        'ocsp_response': OCSPResponse,
        'scvp': SCVPReqRes,
    }


class RevocationInfoChoice(Choice):
    _alternatives = [
        ('crl', CertificateList),
        ('other', OtherRevocationInfoFormat, {'implicit': 1}),
    ]


class RevocationInfoChoices(SetOf):
    _child_spec = RevocationInfoChoice
class SignerInfo(Sequence):
    # Per-signer information in a SignedData structure
    _fields = [
        ('version', CMSVersion),
        ('sid', SignerIdentifier),
        ('digest_algorithm', DigestAlgorithm),
        ('signed_attrs', CMSAttributes, {'implicit': 0, 'optional': True}),
        ('signature_algorithm', SignedDigestAlgorithm),
        ('signature', OctetString),
        ('unsigned_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
    ]


class SignerInfos(SetOf):
    _child_spec = SignerInfo
class SignedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('digest_algorithms', DigestAlgorithms),
        # Spec chosen at parse time by _encap_content_info_spec below
        ('encap_content_info', None),
        ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
        ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
        ('signer_infos', SignerInfos),
    ]

    def _encap_content_info_spec(self):
        """
        Picks the spec for the encap_content_info field based on the version.

        :return:
            EncapsulatedContentInfo or ContentInfo
        """

        # If the encap_content_info is version v1, then this could be a PKCS#7
        # structure, or a CMS structure. CMS wraps the encoded value in an
        # Octet String tag.

        # If the version is greater than 1, it is definite CMS
        if self['version'].native != 'v1':
            return EncapsulatedContentInfo

        # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
        # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
        # allows Any
        return ContentInfo

    _spec_callbacks = {
        'encap_content_info': _encap_content_info_spec
    }
class OriginatorInfo(Sequence):
    _fields = [
        ('certs', CertificateSet, {'implicit': 0, 'optional': True}),
        ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
    ]


class RecipientIdentifier(Choice):
    _alternatives = [
        ('issuer_and_serial_number', IssuerAndSerialNumber),
        ('subject_key_identifier', OctetString, {'implicit': 0}),
    ]


class KeyEncryptionAlgorithmId(ObjectIdentifier):
    _map = {
        '1.2.840.113549.1.1.1': 'rsaes_pkcs1v15',
        '1.2.840.113549.1.1.7': 'rsaes_oaep',
        '2.16.840.1.101.3.4.1.5': 'aes128_wrap',
        '2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad',
        '2.16.840.1.101.3.4.1.25': 'aes192_wrap',
        '2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad',
        '2.16.840.1.101.3.4.1.45': 'aes256_wrap',
        '2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad',
    }

    # Accepts the extra friendly name 'rsa' when mapping back to an OID
    _reverse_map = {
        'rsa': '1.2.840.113549.1.1.1',
        'rsaes_pkcs1v15': '1.2.840.113549.1.1.1',
        'rsaes_oaep': '1.2.840.113549.1.1.7',
        'aes128_wrap': '2.16.840.1.101.3.4.1.5',
        'aes128_wrap_pad': '2.16.840.1.101.3.4.1.8',
        'aes192_wrap': '2.16.840.1.101.3.4.1.25',
        'aes192_wrap_pad': '2.16.840.1.101.3.4.1.28',
        'aes256_wrap': '2.16.840.1.101.3.4.1.45',
        'aes256_wrap_pad': '2.16.840.1.101.3.4.1.48',
    }


class KeyEncryptionAlgorithm(_ForceNullParameters, Sequence):
    _fields = [
        ('algorithm', KeyEncryptionAlgorithmId),
        ('parameters', Any, {'optional': True}),
    ]

    _oid_pair = ('algorithm', 'parameters')
    _oid_specs = {
        'rsaes_oaep': RSAESOAEPParams,
    }
class KeyTransRecipientInfo(Sequence):
    # Recipient info where the content-encryption key is encrypted directly
    # to the recipient's public key
    _fields = [
        ('version', CMSVersion),
        ('rid', RecipientIdentifier),
        ('key_encryption_algorithm', KeyEncryptionAlgorithm),
        ('encrypted_key', OctetString),
    ]


class OriginatorIdentifierOrKey(Choice):
    _alternatives = [
        ('issuer_and_serial_number', IssuerAndSerialNumber),
        ('subject_key_identifier', OctetString, {'implicit': 0}),
        ('originator_key', PublicKeyInfo, {'implicit': 1}),
    ]


class OtherKeyAttribute(Sequence):
    _fields = [
        ('key_attr_id', ObjectIdentifier),
        ('key_attr', Any),
    ]


class RecipientKeyIdentifier(Sequence):
    _fields = [
        ('subject_key_identifier', OctetString),
        ('date', GeneralizedTime, {'optional': True}),
        ('other', OtherKeyAttribute, {'optional': True}),
    ]


class KeyAgreementRecipientIdentifier(Choice):
    _alternatives = [
        ('issuer_and_serial_number', IssuerAndSerialNumber),
        ('r_key_id', RecipientKeyIdentifier, {'implicit': 0}),
    ]


class RecipientEncryptedKey(Sequence):
    _fields = [
        ('rid', KeyAgreementRecipientIdentifier),
        ('encrypted_key', OctetString),
    ]


class RecipientEncryptedKeys(SequenceOf):
    _child_spec = RecipientEncryptedKey


class KeyAgreeRecipientInfo(Sequence):
    # Recipient info based on a key-agreement scheme
    _fields = [
        ('version', CMSVersion),
        ('originator', OriginatorIdentifierOrKey, {'explicit': 0}),
        ('ukm', OctetString, {'explicit': 1, 'optional': True}),
        ('key_encryption_algorithm', KeyEncryptionAlgorithm),
        ('recipient_encrypted_keys', RecipientEncryptedKeys),
    ]


class KEKIdentifier(Sequence):
    _fields = [
        ('key_identifier', OctetString),
        ('date', GeneralizedTime, {'optional': True}),
        ('other', OtherKeyAttribute, {'optional': True}),
    ]


class KEKRecipientInfo(Sequence):
    # Recipient info using a previously-distributed symmetric key
    _fields = [
        ('version', CMSVersion),
        ('kekid', KEKIdentifier),
        ('key_encryption_algorithm', KeyEncryptionAlgorithm),
        ('encrypted_key', OctetString),
    ]


class PasswordRecipientInfo(Sequence):
    # Recipient info using a password-derived key
    _fields = [
        ('version', CMSVersion),
        ('key_derivation_algorithm', KdfAlgorithm, {'implicit': 0, 'optional': True}),
        ('key_encryption_algorithm', KeyEncryptionAlgorithm),
        ('encrypted_key', OctetString),
    ]


class OtherRecipientInfo(Sequence):
    _fields = [
        ('ori_type', ObjectIdentifier),
        ('ori_value', Any),
    ]


class RecipientInfo(Choice):
    _alternatives = [
        ('ktri', KeyTransRecipientInfo),
        ('kari', KeyAgreeRecipientInfo, {'implicit': 1}),
        ('kekri', KEKRecipientInfo, {'implicit': 2}),
        ('pwri', PasswordRecipientInfo, {'implicit': 3}),
        ('ori', OtherRecipientInfo, {'implicit': 4}),
    ]


class RecipientInfos(SetOf):
    _child_spec = RecipientInfo
class EncryptedContentInfo(Sequence):
    _fields = [
        ('content_type', ContentType),
        ('content_encryption_algorithm', EncryptionAlgorithm),
        ('encrypted_content', OctetString, {'implicit': 0, 'optional': True}),
    ]


class EnvelopedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
        ('recipient_infos', RecipientInfos),
        ('encrypted_content_info', EncryptedContentInfo),
        ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
    ]


class SignedAndEnvelopedData(Sequence):
    # PKCS#7 structure combining signing and enveloping in one message
    _fields = [
        ('version', CMSVersion),
        ('recipient_infos', RecipientInfos),
        ('digest_algorithms', DigestAlgorithms),
        ('encrypted_content_info', EncryptedContentInfo),
        ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
        ('crls', CertificateRevocationLists, {'implicit': 1, 'optional': True}),
        ('signer_infos', SignerInfos),
    ]
class DigestedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('digest_algorithm', DigestAlgorithm),
        # Spec chosen at parse time by _encap_content_info_spec below
        ('encap_content_info', None),
        ('digest', OctetString),
    ]

    def _encap_content_info_spec(self):
        """
        Picks the spec for the encap_content_info field based on the version.

        :return:
            EncapsulatedContentInfo or ContentInfo
        """

        # If the encap_content_info is version v1, then this could be a PKCS#7
        # structure, or a CMS structure. CMS wraps the encoded value in an
        # Octet String tag.

        # If the version is greater than 1, it is definite CMS
        if self['version'].native != 'v1':
            return EncapsulatedContentInfo

        # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with
        # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which
        # allows Any
        return ContentInfo

    _spec_callbacks = {
        'encap_content_info': _encap_content_info_spec
    }
class EncryptedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('encrypted_content_info', EncryptedContentInfo),
        ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
    ]


class AuthenticatedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
        ('recipient_infos', RecipientInfos),
        ('mac_algorithm', HmacAlgorithm),
        ('digest_algorithm', DigestAlgorithm, {'implicit': 1, 'optional': True}),
        # This does not require the _spec_callbacks approach of SignedData and
        # DigestedData since AuthenticatedData was not part of PKCS#7
        ('encap_content_info', EncapsulatedContentInfo),
        ('auth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
        ('mac', OctetString),
        ('unauth_attrs', CMSAttributes, {'implicit': 3, 'optional': True}),
    ]


class AuthEnvelopedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
        ('recipient_infos', RecipientInfos),
        ('auth_encrypted_content_info', EncryptedContentInfo),
        ('auth_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
        ('mac', OctetString),
        ('unauth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
    ]


class CompressionAlgorithmId(ObjectIdentifier):
    _map = {
        '1.2.840.113549.1.9.16.3.8': 'zlib',
    }


class CompressionAlgorithm(Sequence):
    _fields = [
        ('algorithm', CompressionAlgorithmId),
        ('parameters', Any, {'optional': True}),
    ]
class CompressedData(Sequence):
    _fields = [
        ('version', CMSVersion),
        ('compression_algorithm', CompressionAlgorithm),
        ('encap_content_info', EncapsulatedContentInfo),
    ]

    # Cached result of the decompressed property
    _decompressed = None

    @property
    def decompressed(self):
        """
        :return:
            A byte string of the zlib-decompressed encapsulated content

        :raises:
            SystemError - when the zlib module is not available
        """

        if self._decompressed is None:
            if zlib is None:
                raise SystemError('The zlib module is not available')
            self._decompressed = zlib.decompress(self['encap_content_info']['content'].native)
        return self._decompressed
# NOTE(review): this redefines the RecipientKeyIdentifier declared earlier in
# the module (snake_case field names) with a camelCase variant; from here on
# the name refers to this class - confirm the shadowing is intentional
class RecipientKeyIdentifier(Sequence):
    _fields = [
        ('subjectKeyIdentifier', OctetString),
        ('date', GeneralizedTime, {'optional': True}),
        ('other', OtherKeyAttribute, {'optional': True}),
    ]


class SMIMEEncryptionKeyPreference(Choice):
    _alternatives = [
        ('issuer_and_serial_number', IssuerAndSerialNumber, {'implicit': 0}),
        ('recipientKeyId', RecipientKeyIdentifier, {'implicit': 1}),
        ('subjectAltKeyIdentifier', PublicKeyInfo, {'implicit': 2}),
    ]


class SMIMEEncryptionKeyPreferences(SetOf):
    _child_spec = SMIMEEncryptionKeyPreference
# These specs are assigned after all classes are defined because they contain
# forward references to structures declared later in the module
ContentInfo._oid_specs = {
    'data': OctetString,
    'signed_data': SignedData,
    'enveloped_data': EnvelopedData,
    'signed_and_enveloped_data': SignedAndEnvelopedData,
    'digested_data': DigestedData,
    'encrypted_data': EncryptedData,
    'authenticated_data': AuthenticatedData,
    'compressed_data': CompressedData,
    'authenticated_enveloped_data': AuthEnvelopedData,
}

# Same as ContentInfo._oid_specs, minus 'data' since CMS wraps the data
# payload in the Octet String of the EncapsulatedContentInfo itself
EncapsulatedContentInfo._oid_specs = {
    'signed_data': SignedData,
    'enveloped_data': EnvelopedData,
    'signed_and_enveloped_data': SignedAndEnvelopedData,
    'digested_data': DigestedData,
    'encrypted_data': EncryptedData,
    'authenticated_data': AuthenticatedData,
    'compressed_data': CompressedData,
    'authenticated_enveloped_data': AuthEnvelopedData,
}

# Maps CMS attribute names to the spec used to parse their value sets
CMSAttribute._oid_specs = {
    'content_type': SetOfContentType,
    'message_digest': SetOfOctetString,
    'signing_time': SetOfTime,
    'counter_signature': SignerInfos,
    'signature_time_stamp_token': SetOfContentInfo,
    'cms_algorithm_protection': SetOfCMSAlgorithmProtection,
    'microsoft_nested_signature': SetOfContentInfo,
    'microsoft_time_stamp_token': SetOfContentInfo,
    'encrypt_key_pref': SMIMEEncryptionKeyPreferences,
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,536 @@
# coding: utf-8
"""
ASN.1 type classes for certificate revocation lists (CRL). Exports the
following items:
- CertificateList()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import hashlib
from .algos import SignedDigestAlgorithm
from .core import (
Boolean,
Enumerated,
GeneralizedTime,
Integer,
ObjectIdentifier,
OctetBitString,
ParsableOctetString,
Sequence,
SequenceOf,
)
from .x509 import (
AuthorityInfoAccessSyntax,
AuthorityKeyIdentifier,
CRLDistributionPoints,
DistributionPointName,
GeneralNames,
Name,
ReasonFlags,
Time,
)
# The structures in this file are taken from https://tools.ietf.org/html/rfc5280
class Version(Integer):
    _map = {
        0: 'v1',
        1: 'v2',
        2: 'v3',
    }


class IssuingDistributionPoint(Sequence):
    _fields = [
        ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
        ('only_contains_user_certs', Boolean, {'implicit': 1, 'default': False}),
        ('only_contains_ca_certs', Boolean, {'implicit': 2, 'default': False}),
        ('only_some_reasons', ReasonFlags, {'implicit': 3, 'optional': True}),
        ('indirect_crl', Boolean, {'implicit': 4, 'default': False}),
        ('only_contains_attribute_certs', Boolean, {'implicit': 5, 'default': False}),
    ]


class TBSCertListExtensionId(ObjectIdentifier):
    _map = {
        '2.5.29.18': 'issuer_alt_name',
        '2.5.29.20': 'crl_number',
        '2.5.29.27': 'delta_crl_indicator',
        '2.5.29.28': 'issuing_distribution_point',
        '2.5.29.35': 'authority_key_identifier',
        '2.5.29.46': 'freshest_crl',
        '1.3.6.1.5.5.7.1.1': 'authority_information_access',
    }


class TBSCertListExtension(Sequence):
    _fields = [
        ('extn_id', TBSCertListExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'issuer_alt_name': GeneralNames,
        'crl_number': Integer,
        'delta_crl_indicator': Integer,
        'issuing_distribution_point': IssuingDistributionPoint,
        'authority_key_identifier': AuthorityKeyIdentifier,
        'freshest_crl': CRLDistributionPoints,
        'authority_information_access': AuthorityInfoAccessSyntax,
    }


class TBSCertListExtensions(SequenceOf):
    _child_spec = TBSCertListExtension
class CRLReason(Enumerated):
    # Revocation reason codes - value 7 is unused by the standard
    _map = {
        0: 'unspecified',
        1: 'key_compromise',
        2: 'ca_compromise',
        3: 'affiliation_changed',
        4: 'superseded',
        5: 'cessation_of_operation',
        6: 'certificate_hold',
        8: 'remove_from_crl',
        9: 'privilege_withdrawn',
        10: 'aa_compromise',
    }

    @property
    def human_friendly(self):
        """
        :return:
            A unicode string with revocation description that is suitable to
            show to end-users. Starts with a lower case letter and phrased in
            such a way that it makes sense after the phrase "because of" or
            "due to".

        :raises:
            KeyError - when the current value is not one of the mapped reasons
        """

        return {
            'unspecified': 'an unspecified reason',
            'key_compromise': 'a compromised key',
            'ca_compromise': 'the CA being compromised',
            'affiliation_changed': 'an affiliation change',
            'superseded': 'certificate supersession',
            'cessation_of_operation': 'a cessation of operation',
            'certificate_hold': 'a certificate hold',
            'remove_from_crl': 'removal from the CRL',
            # Fix: was the misspelling 'privilege withdrawl'
            'privilege_withdrawn': 'privilege withdrawal',
            'aa_compromise': 'the AA being compromised',
        }[self.native]
class CRLEntryExtensionId(ObjectIdentifier):
    _map = {
        '2.5.29.21': 'crl_reason',
        '2.5.29.23': 'hold_instruction_code',
        '2.5.29.24': 'invalidity_date',
        '2.5.29.29': 'certificate_issuer',
    }


class CRLEntryExtension(Sequence):
    _fields = [
        ('extn_id', CRLEntryExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'crl_reason': CRLReason,
        'hold_instruction_code': ObjectIdentifier,
        'invalidity_date': GeneralizedTime,
        'certificate_issuer': GeneralNames,
    }


class CRLEntryExtensions(SequenceOf):
    _child_spec = CRLEntryExtension
class RevokedCertificate(Sequence):
    # A single entry in a CRL's revoked_certificates list
    _fields = [
        ('user_certificate', Integer),
        ('revocation_date', Time),
        ('crl_entry_extensions', CRLEntryExtensions, {'optional': True}),
    ]

    # Lazily-populated caches for the extension-backed properties below;
    # the _*_value attributes are filled in by _set_extensions()
    _processed_extensions = False
    _critical_extensions = None
    _crl_reason_value = None
    _invalidity_date_value = None
    _certificate_issuer_value = None
    # False is a sentinel meaning "not computed yet" (None is a valid result)
    _issuer_name = False

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """

        self._critical_extensions = set()

        for extension in self['crl_entry_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """

        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def crl_reason_value(self):
        """
        This extension indicates the reason that a certificate was revoked.

        :return:
            None or a CRLReason object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._crl_reason_value

    @property
    def invalidity_date_value(self):
        """
        This extension indicates the suspected date/time the private key was
        compromised or the certificate became invalid. This would usually be
        before the revocation date, which is when the CA processed the
        revocation.

        :return:
            None or a GeneralizedTime object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._invalidity_date_value

    @property
    def certificate_issuer_value(self):
        """
        This extension indicates the issuer of the certificate in question,
        and is used in indirect CRLs. CRL entries without this extension are
        for certificates issued from the last seen issuer.

        :return:
            None or an x509.GeneralNames object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._certificate_issuer_value

    @property
    def issuer_name(self):
        """
        :return:
            None, or an asn1crypto.x509.Name object for the issuer of the cert
        """

        if self._issuer_name is False:
            self._issuer_name = None
            if self.certificate_issuer_value:
                # Use the first directory_name entry, if any
                for general_name in self.certificate_issuer_value:
                    if general_name.name == 'directory_name':
                        self._issuer_name = general_name.chosen
                        break
        return self._issuer_name
class RevokedCertificates(SequenceOf):
    _child_spec = RevokedCertificate


class TbsCertList(Sequence):
    # The to-be-signed portion of a CertificateList
    _fields = [
        ('version', Version, {'optional': True}),
        ('signature', SignedDigestAlgorithm),
        ('issuer', Name),
        ('this_update', Time),
        ('next_update', Time, {'optional': True}),
        ('revoked_certificates', RevokedCertificates, {'optional': True}),
        ('crl_extensions', TBSCertListExtensions, {'explicit': 0, 'optional': True}),
    ]
class CertificateList(Sequence):
    # A signed certificate revocation list (CRL)
    _fields = [
        ('tbs_cert_list', TbsCertList),
        ('signature_algorithm', SignedDigestAlgorithm),
        ('signature', OctetBitString),
    ]

    # Lazily-populated caches for the extension-backed properties below;
    # the _*_value attributes are filled in by _set_extensions()
    _processed_extensions = False
    _critical_extensions = None
    _issuer_alt_name_value = None
    _crl_number_value = None
    _delta_crl_indicator_value = None
    _issuing_distribution_point_value = None
    _authority_key_identifier_value = None
    _freshest_crl_value = None
    _authority_information_access_value = None
    _issuer_cert_urls = None
    _delta_crl_distribution_points = None
    _sha1 = None
    _sha256 = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """

        self._critical_extensions = set()

        for extension in self['tbs_cert_list']['crl_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """

        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def issuer_alt_name_value(self):
        """
        This extension allows associating one or more alternative names with
        the issuer of the CRL.

        :return:
            None or an x509.GeneralNames object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._issuer_alt_name_value

    @property
    def crl_number_value(self):
        """
        This extension adds a monotonically increasing number to the CRL and is
        used to distinguish different versions of the CRL.

        :return:
            None or an Integer object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._crl_number_value

    @property
    def delta_crl_indicator_value(self):
        """
        This extension indicates a CRL is a delta CRL, and contains the CRL
        number of the base CRL that it is a delta from.

        :return:
            None or an Integer object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._delta_crl_indicator_value

    @property
    def issuing_distribution_point_value(self):
        """
        This extension includes information about what types of revocations
        and certificates are part of the CRL.

        :return:
            None or an IssuingDistributionPoint object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._issuing_distribution_point_value

    @property
    def authority_key_identifier_value(self):
        """
        This extension helps in identifying the public key with which to
        validate the authenticity of the CRL.

        :return:
            None or an AuthorityKeyIdentifier object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._authority_key_identifier_value

    @property
    def freshest_crl_value(self):
        """
        This extension is used in complete CRLs to indicate where a delta CRL
        may be located.

        :return:
            None or a CRLDistributionPoints object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._freshest_crl_value

    @property
    def authority_information_access_value(self):
        """
        This extension is used to provide a URL with which to download the
        certificate used to sign this CRL.

        :return:
            None or an AuthorityInfoAccessSyntax object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._authority_information_access_value

    @property
    def issuer(self):
        """
        :return:
            An asn1crypto.x509.Name object for the issuer of the CRL
        """

        return self['tbs_cert_list']['issuer']

    @property
    def authority_key_identifier(self):
        """
        :return:
            None or a byte string of the key_identifier from the authority key
            identifier extension
        """

        if not self.authority_key_identifier_value:
            return None
        return self.authority_key_identifier_value['key_identifier'].native

    @property
    def issuer_cert_urls(self):
        """
        :return:
            A list of unicode strings that are URLs that should contain either
            an individual DER-encoded X.509 certificate, or a DER-encoded CMS
            message containing multiple certificates
        """

        if self._issuer_cert_urls is None:
            self._issuer_cert_urls = []
            if self.authority_information_access_value:
                for entry in self.authority_information_access_value:
                    if entry['access_method'].native == 'ca_issuers':
                        location = entry['access_location']
                        if location.name != 'uniform_resource_identifier':
                            continue
                        url = location.native
                        # Only http URLs are collected here
                        if url.lower()[0:7] == 'http://':
                            self._issuer_cert_urls.append(url)
        return self._issuer_cert_urls

    @property
    def delta_crl_distribution_points(self):
        """
        Returns delta CRL URLs - only applies to complete CRLs

        :return:
            A list of zero or more DistributionPoint objects
        """

        if self._delta_crl_distribution_points is None:
            self._delta_crl_distribution_points = []

            if self.freshest_crl_value is not None:
                for distribution_point in self.freshest_crl_value:
                    distribution_point_name = distribution_point['distribution_point']
                    # RFC 5280 indicates conforming CA should not use the relative form
                    if distribution_point_name.name == 'name_relative_to_crl_issuer':
                        continue
                    # This library is currently only concerned with HTTP-based CRLs
                    for general_name in distribution_point_name.chosen:
                        if general_name.name == 'uniform_resource_identifier':
                            self._delta_crl_distribution_points.append(distribution_point)

        return self._delta_crl_distribution_points

    @property
    def signature(self):
        """
        :return:
            A byte string of the signature
        """

        return self['signature'].native

    @property
    def sha1(self):
        """
        :return:
            The SHA1 hash of the DER-encoded bytes of this certificate list
        """

        if self._sha1 is None:
            self._sha1 = hashlib.sha1(self.dump()).digest()
        return self._sha1

    @property
    def sha256(self):
        """
        :return:
            The SHA-256 hash of the DER-encoded bytes of this certificate list
        """

        if self._sha256 is None:
            self._sha256 = hashlib.sha256(self.dump()).digest()
        return self._sha256

View File

@ -0,0 +1,133 @@
# coding: utf-8
"""
ASN.1 type classes for certificate signing requests (CSR). Exports the
following items:
- CertificationRequest()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .algos import SignedDigestAlgorithm
from .core import (
Any,
BitString,
BMPString,
Integer,
ObjectIdentifier,
OctetBitString,
Sequence,
SetOf,
UTF8String
)
from .keys import PublicKeyInfo
from .x509 import DirectoryString, Extensions, Name
# The structures in this file are taken from https://tools.ietf.org/html/rfc2986
# and https://tools.ietf.org/html/rfc2985
class Version(Integer):
    _map = {
        0: 'v1',
    }


class CSRAttributeType(ObjectIdentifier):
    _map = {
        '1.2.840.113549.1.9.7': 'challenge_password',
        '1.2.840.113549.1.9.9': 'extended_certificate_attributes',
        '1.2.840.113549.1.9.14': 'extension_request',
        # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/a5eaae36-e9f3-4dc5-a687-bfa7115954f1
        '1.3.6.1.4.1.311.13.2.2': 'microsoft_enrollment_csp_provider',
        # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/7c677cba-030d-48be-ba2b-01e407705f34
        '1.3.6.1.4.1.311.13.2.3': 'microsoft_os_version',
        # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/64e5ff6d-c6dd-4578-92f7-b3d895f9b9c7
        '1.3.6.1.4.1.311.21.20': 'microsoft_request_client_info',
    }


class SetOfDirectoryString(SetOf):
    _child_spec = DirectoryString


class Attribute(Sequence):
    _fields = [
        ('type', ObjectIdentifier),
        ('values', SetOf, {'spec': Any}),
    ]


class SetOfAttributes(SetOf):
    _child_spec = Attribute


class SetOfExtensions(SetOf):
    _child_spec = Extensions


class MicrosoftEnrollmentCSProvider(Sequence):
    _fields = [
        ('keyspec', Integer),
        ('cspname', BMPString),  # cryptographic service provider name
        ('signature', BitString),
    ]


class SetOfMicrosoftEnrollmentCSProvider(SetOf):
    _child_spec = MicrosoftEnrollmentCSProvider


class MicrosoftRequestClientInfo(Sequence):
    _fields = [
        ('clientid', Integer),
        ('machinename', UTF8String),
        ('username', UTF8String),
        ('processname', UTF8String),
    ]


class SetOfMicrosoftRequestClientInfo(SetOf):
    _child_spec = MicrosoftRequestClientInfo


class CRIAttribute(Sequence):
    # Attribute of a CertificationRequestInfo - the type OID selects the
    # value spec via _oid_specs
    _fields = [
        ('type', CSRAttributeType),
        ('values', Any),
    ]

    _oid_pair = ('type', 'values')
    _oid_specs = {
        'challenge_password': SetOfDirectoryString,
        'extended_certificate_attributes': SetOfAttributes,
        'extension_request': SetOfExtensions,
        'microsoft_enrollment_csp_provider': SetOfMicrosoftEnrollmentCSProvider,
        'microsoft_os_version': SetOfDirectoryString,
        'microsoft_request_client_info': SetOfMicrosoftRequestClientInfo,
    }


class CRIAttributes(SetOf):
    _child_spec = CRIAttribute


class CertificationRequestInfo(Sequence):
    # The to-be-signed portion of a CSR
    _fields = [
        ('version', Version),
        ('subject', Name),
        ('subject_pk_info', PublicKeyInfo),
        ('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
    ]


class CertificationRequest(Sequence):
    # A signed certificate signing request
    _fields = [
        ('certification_request_info', CertificationRequestInfo),
        ('signature_algorithm', SignedDigestAlgorithm),
        ('signature', OctetBitString),
    ]

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,703 @@
# coding: utf-8
"""
ASN.1 type classes for the online certificate status protocol (OCSP). Exports
the following items:
- OCSPRequest()
- OCSPResponse()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from ._errors import unwrap
from .algos import DigestAlgorithm, SignedDigestAlgorithm
from .core import (
Boolean,
Choice,
Enumerated,
GeneralizedTime,
IA5String,
Integer,
Null,
ObjectIdentifier,
OctetBitString,
OctetString,
ParsableOctetString,
Sequence,
SequenceOf,
)
from .crl import AuthorityInfoAccessSyntax, CRLReason
from .keys import PublicKeyAlgorithm
from .x509 import Certificate, GeneralName, GeneralNames, Name
# The structures in this file are taken from https://tools.ietf.org/html/rfc6960
class Version(Integer):
    _map = {
        0: 'v1'
    }


class CertId(Sequence):
    # Identifies the certificate whose status is being requested
    _fields = [
        ('hash_algorithm', DigestAlgorithm),
        ('issuer_name_hash', OctetString),
        ('issuer_key_hash', OctetString),
        ('serial_number', Integer),
    ]


class ServiceLocator(Sequence):
    _fields = [
        ('issuer', Name),
        ('locator', AuthorityInfoAccessSyntax),
    ]


class RequestExtensionId(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.48.1.7': 'service_locator',
    }


class RequestExtension(Sequence):
    _fields = [
        ('extn_id', RequestExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'service_locator': ServiceLocator,
    }


class RequestExtensions(SequenceOf):
    _child_spec = RequestExtension
class Request(Sequence):
    # A single certificate status request within an OCSP request
    _fields = [
        ('req_cert', CertId),
        ('single_request_extensions', RequestExtensions, {'explicit': 0, 'optional': True}),
    ]

    # Lazily-populated caches for the extension-backed properties below;
    # the _*_value attributes are filled in by _set_extensions()
    _processed_extensions = False
    _critical_extensions = None
    _service_locator_value = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """

        self._critical_extensions = set()

        for extension in self['single_request_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """

        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def service_locator_value(self):
        """
        This extension is used when communicating with an OCSP responder that
        acts as a proxy for OCSP requests

        :return:
            None or a ServiceLocator object
        """

        if self._processed_extensions is False:
            self._set_extensions()
        return self._service_locator_value
class Requests(SequenceOf):
    _child_spec = Request


class ResponseType(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.48.1.1': 'basic_ocsp_response',
    }


class AcceptableResponses(SequenceOf):
    _child_spec = ResponseType


class PreferredSignatureAlgorithm(Sequence):
    _fields = [
        ('sig_identifier', SignedDigestAlgorithm),
        ('cert_identifier', PublicKeyAlgorithm, {'optional': True}),
    ]


class PreferredSignatureAlgorithms(SequenceOf):
    _child_spec = PreferredSignatureAlgorithm


class TBSRequestExtensionId(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.48.1.2': 'nonce',
        '1.3.6.1.5.5.7.48.1.4': 'acceptable_responses',
        '1.3.6.1.5.5.7.48.1.8': 'preferred_signature_algorithms',
    }


class TBSRequestExtension(Sequence):
    _fields = [
        ('extn_id', TBSRequestExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'nonce': OctetString,
        'acceptable_responses': AcceptableResponses,
        'preferred_signature_algorithms': PreferredSignatureAlgorithms,
    }


class TBSRequestExtensions(SequenceOf):
    _child_spec = TBSRequestExtension


class TBSRequest(Sequence):
    # The to-be-signed portion of an OCSP request
    _fields = [
        ('version', Version, {'explicit': 0, 'default': 'v1'}),
        ('requestor_name', GeneralName, {'explicit': 1, 'optional': True}),
        ('request_list', Requests),
        ('request_extensions', TBSRequestExtensions, {'explicit': 2, 'optional': True}),
    ]


class Certificates(SequenceOf):
    _child_spec = Certificate


class Signature(Sequence):
    # Optional signature over an OCSP request
    _fields = [
        ('signature_algorithm', SignedDigestAlgorithm),
        ('signature', OctetBitString),
        ('certs', Certificates, {'explicit': 0, 'optional': True}),
    ]
class OCSPRequest(Sequence):
    _fields = [
        ('tbs_request', TBSRequest),
        ('optional_signature', Signature, {'explicit': 0, 'optional': True}),
    ]

    # Lazy-parse cache: stays False until _set_extensions() has run
    _processed_extensions = False
    # Set of names/OIDs of extensions marked critical
    _critical_extensions = None
    # Parsed values of known request extensions (None when absent)
    _nonce_value = None
    _acceptable_responses_value = None
    _preferred_signature_algorithms_value = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """
        self._critical_extensions = set()

        for extension in self['tbs_request']['request_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            # Only cache extensions this class declares an attribute for
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """
        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def nonce_value(self):
        """
        This extension is used to prevent replay attacks by including a unique,
        random value with each request/response pair

        :return:
            None or an OctetString object
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._nonce_value

    @property
    def acceptable_responses_value(self):
        """
        This extension is used to allow the client and server to communicate
        with alternative response formats other than just basic_ocsp_response,
        although no other formats are defined in the standard.

        :return:
            None or an AcceptableResponses object
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._acceptable_responses_value

    @property
    def preferred_signature_algorithms_value(self):
        """
        This extension is used by the client to define what signature algorithms
        are preferred, including both the hash algorithm and the public key
        algorithm, with a level of detail down to even the public key algorithm
        parameters, such as curve name.

        :return:
            None or a PreferredSignatureAlgorithms object
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._preferred_signature_algorithms_value
class OCSPResponseStatus(Enumerated):
    _map = {
        0: 'successful',
        1: 'malformed_request',
        2: 'internal_error',
        3: 'try_later',
        # value 4 is intentionally absent (unused by the OCSP spec)
        5: 'sign_required',
        6: 'unauthorized',
    }


class ResponderId(Choice):
    # The responder identifies itself either by subject name or by key hash
    _alternatives = [
        ('by_name', Name, {'explicit': 1}),
        ('by_key', OctetString, {'explicit': 2}),
    ]
# Custom class to return a meaningful .native attribute from CertStatus()
class StatusGood(Null):
    def set(self, value):
        """
        Sets the value of the object

        :param value:
            None or 'good'
        """
        is_acceptable = (
            value is None
            or value == 'good'
            or isinstance(value, Null)
        )
        if not is_acceptable:
            raise ValueError(unwrap(
                '''
                value must be one of None, "good", not %s
                ''',
                repr(value)
            ))

        # A Null value always has empty contents
        self.contents = b''

    @property
    def native(self):
        # Report a fixed status string instead of Null's usual None
        return 'good'
# Custom class to return a meaningful .native attribute from CertStatus()
class StatusUnknown(Null):
    def set(self, value):
        """
        Sets the value of the object

        :param value:
            None or 'unknown'
        """
        is_acceptable = (
            value is None
            or value == 'unknown'
            or isinstance(value, Null)
        )
        if not is_acceptable:
            raise ValueError(unwrap(
                '''
                value must be one of None, "unknown", not %s
                ''',
                repr(value)
            ))

        # A Null value always has empty contents
        self.contents = b''

    @property
    def native(self):
        # Report a fixed status string instead of Null's usual None
        return 'unknown'
class RevokedInfo(Sequence):
    # Details for a certificate with 'revoked' status
    _fields = [
        ('revocation_time', GeneralizedTime),
        ('revocation_reason', CRLReason, {'explicit': 0, 'optional': True}),
    ]


class CertStatus(Choice):
    _alternatives = [
        ('good', StatusGood, {'implicit': 0}),
        ('revoked', RevokedInfo, {'implicit': 1}),
        ('unknown', StatusUnknown, {'implicit': 2}),
    ]


class CrlId(Sequence):
    _fields = [
        ('crl_url', IA5String, {'explicit': 0, 'optional': True}),
        ('crl_num', Integer, {'explicit': 1, 'optional': True}),
        ('crl_time', GeneralizedTime, {'explicit': 2, 'optional': True}),
    ]


class SingleResponseExtensionId(ObjectIdentifier):
    _map = {
        '1.3.6.1.5.5.7.48.1.3': 'crl',
        '1.3.6.1.5.5.7.48.1.6': 'archive_cutoff',
        # These are CRLEntryExtension values from
        # https://tools.ietf.org/html/rfc5280
        '2.5.29.21': 'crl_reason',
        '2.5.29.24': 'invalidity_date',
        '2.5.29.29': 'certificate_issuer',
        # https://tools.ietf.org/html/rfc6962.html#page-13
        '1.3.6.1.4.1.11129.2.4.5': 'signed_certificate_timestamp_list',
    }
class SingleResponseExtension(Sequence):
    _fields = [
        ('extn_id', SingleResponseExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    # extn_value is parsed according to the spec selected by extn_id
    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'crl': CrlId,
        'archive_cutoff': GeneralizedTime,
        'crl_reason': CRLReason,
        'invalidity_date': GeneralizedTime,
        'certificate_issuer': GeneralNames,
        'signed_certificate_timestamp_list': OctetString,
    }


class SingleResponseExtensions(SequenceOf):
    _child_spec = SingleResponseExtension
class SingleResponse(Sequence):
    # Status information for a single certificate within a response
    _fields = [
        ('cert_id', CertId),
        ('cert_status', CertStatus),
        ('this_update', GeneralizedTime),
        ('next_update', GeneralizedTime, {'explicit': 0, 'optional': True}),
        ('single_extensions', SingleResponseExtensions, {'explicit': 1, 'optional': True}),
    ]

    # Lazy-parse cache: stays False until _set_extensions() has run
    _processed_extensions = False
    # Set of names/OIDs of extensions marked critical
    _critical_extensions = None
    # Parsed values of known single-response extensions (None when absent)
    _crl_value = None
    _archive_cutoff_value = None
    _crl_reason_value = None
    _invalidity_date_value = None
    _certificate_issuer_value = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """
        self._critical_extensions = set()

        for extension in self['single_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            # Only cache extensions this class declares an attribute for
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """
        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def crl_value(self):
        """
        This extension is used to locate the CRL that a certificate's revocation
        is contained within.

        :return:
            None or a CrlId object
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._crl_value

    @property
    def archive_cutoff_value(self):
        """
        This extension is used to indicate the date at which an archived
        (historical) certificate status entry will no longer be available.

        :return:
            None or a GeneralizedTime object
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._archive_cutoff_value

    @property
    def crl_reason_value(self):
        """
        This extension indicates the reason that a certificate was revoked.

        :return:
            None or a CRLReason object
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._crl_reason_value

    @property
    def invalidity_date_value(self):
        """
        This extension indicates the suspected date/time the private key was
        compromised or the certificate became invalid. This would usually be
        before the revocation date, which is when the CA processed the
        revocation.

        :return:
            None or a GeneralizedTime object
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._invalidity_date_value

    @property
    def certificate_issuer_value(self):
        """
        This extension indicates the issuer of the certificate in question.

        :return:
            None or an x509.GeneralNames object
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._certificate_issuer_value
class Responses(SequenceOf):
    _child_spec = SingleResponse


class ResponseDataExtensionId(ObjectIdentifier):
    # Extension OIDs that may appear on ResponseData
    _map = {
        '1.3.6.1.5.5.7.48.1.2': 'nonce',
        '1.3.6.1.5.5.7.48.1.9': 'extended_revoke',
    }


class ResponseDataExtension(Sequence):
    _fields = [
        ('extn_id', ResponseDataExtensionId),
        ('critical', Boolean, {'default': False}),
        ('extn_value', ParsableOctetString),
    ]

    # extn_value is parsed according to the spec selected by extn_id
    _oid_pair = ('extn_id', 'extn_value')
    _oid_specs = {
        'nonce': OctetString,
        'extended_revoke': Null,
    }


class ResponseDataExtensions(SequenceOf):
    _child_spec = ResponseDataExtension


class ResponseData(Sequence):
    # The to-be-signed portion of a BasicOCSPResponse
    _fields = [
        ('version', Version, {'explicit': 0, 'default': 'v1'}),
        ('responder_id', ResponderId),
        ('produced_at', GeneralizedTime),
        ('responses', Responses),
        ('response_extensions', ResponseDataExtensions, {'explicit': 1, 'optional': True}),
    ]
class BasicOCSPResponse(Sequence):
    _fields = [
        ('tbs_response_data', ResponseData),
        ('signature_algorithm', SignedDigestAlgorithm),
        ('signature', OctetBitString),
        ('certs', Certificates, {'explicit': 0, 'optional': True}),
    ]


class ResponseBytes(Sequence):
    _fields = [
        ('response_type', ResponseType),
        ('response', ParsableOctetString),
    ]

    # response is parsed according to the spec selected by response_type
    _oid_pair = ('response_type', 'response')
    _oid_specs = {
        'basic_ocsp_response': BasicOCSPResponse,
    }
class OCSPResponse(Sequence):
    _fields = [
        ('response_status', OCSPResponseStatus),
        ('response_bytes', ResponseBytes, {'explicit': 0, 'optional': True}),
    ]

    # Lazy-parse cache: stays False until _set_extensions() has run
    _processed_extensions = False
    # Set of names/OIDs of extensions marked critical
    _critical_extensions = None
    # Parsed values of known response extensions (None when absent)
    _nonce_value = None
    _extended_revoke_value = None

    def _set_extensions(self):
        """
        Sets common named extensions to private attributes and creates a list
        of critical extensions
        """
        self._critical_extensions = set()

        # Extensions live on the nested ResponseData, not on this sequence
        for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']:
            name = extension['extn_id'].native
            attribute_name = '_%s_value' % name
            if hasattr(self, attribute_name):
                setattr(self, attribute_name, extension['extn_value'].parsed)
            if extension['critical'].native:
                self._critical_extensions.add(name)

        self._processed_extensions = True

    @property
    def critical_extensions(self):
        """
        Returns a set of the names (or OID if not a known extension) of the
        extensions marked as critical

        :return:
            A set of unicode strings
        """
        if not self._processed_extensions:
            self._set_extensions()
        return self._critical_extensions

    @property
    def nonce_value(self):
        """
        This extension is used to prevent replay attacks on the request/response
        exchange

        :return:
            None or an OctetString object
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._nonce_value

    @property
    def extended_revoke_value(self):
        """
        This extension is used to signal that the responder will return a
        "revoked" status for non-issued certificates.

        :return:
            None or a Null object (if present)
        """
        if self._processed_extensions is False:
            self._set_extensions()
        return self._extended_revoke_value

    @property
    def basic_ocsp_response(self):
        """
        A shortcut into the BasicOCSPResponse sequence

        :return:
            None or an asn1crypto.ocsp.BasicOCSPResponse object
        """
        return self['response_bytes']['response'].parsed

    @property
    def response_data(self):
        """
        A shortcut into the parsed, ResponseData sequence

        :return:
            None or an asn1crypto.ocsp.ResponseData object
        """
        return self['response_bytes']['response'].parsed['tbs_response_data']

View File

@ -0,0 +1,292 @@
# coding: utf-8
"""
Functions for parsing and dumping using the ASN.1 DER encoding. Exports the
following items:
- emit()
- parse()
- peek()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import sys
from ._types import byte_cls, chr_cls, type_name
from .util import int_from_bytes, int_to_bytes
# True on Python 2 ((2, x, ...) sorts before (3,)); Python 3's (3, x, ...) sorts after
_PY2 = sys.version_info <= (3,)
# Error template taking (bytes requested, bytes available)
_INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available'
# Cap on nesting of indefinite-length values to bound recursion in _parse()
_MAX_DEPTH = 10
def emit(class_, method, tag, contents):
    """
    Constructs a byte string of an ASN.1 DER-encoded value

    This is typically not useful. Instead, use one of the standard classes from
    asn1crypto.core, or construct a new class with specific fields, and call the
    .dump() method.

    :param class_:
        An integer ASN.1 class value: 0 (universal), 1 (application),
        2 (context), 3 (private)

    :param method:
        An integer ASN.1 method value: 0 (primitive), 1 (constructed)

    :param tag:
        An integer ASN.1 tag value

    :param contents:
        A byte string of the encoded byte contents

    :raises:
        TypeError - when a parameter is of the wrong type
        ValueError - when a parameter is out of range

    :return:
        A byte string of the ASN.1 DER value (header and contents)
    """
    if not isinstance(class_, int):
        raise TypeError('class_ must be an integer, not %s' % type_name(class_))

    if class_ < 0 or class_ > 3:
        raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_)

    if not isinstance(method, int):
        raise TypeError('method must be an integer, not %s' % type_name(method))

    if method < 0 or method > 1:
        raise ValueError('method must be 0 or 1, not %s' % method)

    if not isinstance(tag, int):
        raise TypeError('tag must be an integer, not %s' % type_name(tag))

    # Tag 0 is valid, so the guard is < 0 and the message must say
    # "greater than or equal to" (the previous message claimed "greater than")
    if tag < 0:
        raise ValueError('tag must be greater than or equal to zero, not %s' % tag)

    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))

    return _dump_header(class_, method, tag, contents) + contents
def parse(contents, strict=False):
    """
    Parses a byte string of ASN.1 BER/DER-encoded data.

    This is typically not useful. Instead, use one of the standard classes from
    asn1crypto.core, or construct a new class with specific fields, and call the
    .load() class method.

    :param contents:
        A byte string of BER/DER-encoded data

    :param strict:
        A boolean indicating if trailing data should be forbidden - if so, a
        ValueError will be raised when trailing data exists

    :raises:
        ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
        TypeError - when contents is not a byte string

    :return:
        A 6-element tuple:
         - 0: integer class (0 to 3)
         - 1: integer method
         - 2: integer tag
         - 3: byte string header
         - 4: byte string content
         - 5: byte string trailer
    """
    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))

    total_len = len(contents)
    parsed, num_consumed = _parse(contents, total_len)

    # In strict mode, every byte must have been consumed by the value
    if strict and num_consumed != total_len:
        raise ValueError('Extra data - %d bytes of trailing data were provided' % (total_len - num_consumed))

    return parsed
def peek(contents):
    """
    Parses a byte string of ASN.1 BER/DER-encoded data to find the length

    This is typically used to look into an encoded value to see how long the
    next chunk of ASN.1-encoded data is. Primarily it is useful when a
    value is a concatenation of multiple values.

    :param contents:
        A byte string of BER/DER-encoded data

    :raises:
        ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
        TypeError - when contents is not a byte string

    :return:
        An integer with the number of bytes occupied by the ASN.1 value
    """
    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))

    # Only the consumed-byte count matters here; the parsed tuple is discarded
    _, num_consumed = _parse(contents, len(contents))
    return num_consumed
def _parse(encoded_data, data_len, pointer=0, lengths_only=False, depth=0):
    """
    Parses a byte string into component parts

    :param encoded_data:
        A byte string that contains BER-encoded data

    :param data_len:
        The integer length of the encoded data

    :param pointer:
        The index in the byte string to parse from

    :param lengths_only:
        A boolean to cause the call to return a 2-element tuple of the integer
        number of bytes in the header and the integer number of bytes in the
        contents. Internal use only.

    :param depth:
        The recursion depth when evaluating indefinite-length encoding.

    :return:
        A 2-element tuple:
         - 0: A tuple of (class_, method, tag, header, content, trailer)
         - 1: An integer indicating how many bytes were consumed
    """
    if depth > _MAX_DEPTH:
        raise ValueError('Indefinite-length recursion limit exceeded')

    start = pointer

    # Identifier octet: class in bits 7-6, constructed flag in bit 5,
    # tag in bits 4-0
    if data_len < pointer + 1:
        raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
    first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
    pointer += 1

    tag = first_octet & 31
    constructed = (first_octet >> 5) & 1
    # Base 128 tag using 8th bit as continuation indicator
    if tag == 31:
        # High-tag-number form: tag value continues in subsequent octets
        tag = 0
        while True:
            if data_len < pointer + 1:
                raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
            num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
            pointer += 1
            # A leading 0x80 continuation octet would encode leading zeros
            if num == 0x80 and tag == 0:
                raise ValueError('Non-minimal tag encoding')
            tag *= 128
            tag += num & 127
            if num >> 7 == 0:
                break
        # The high-tag form must not be used for tags that fit the low form
        if tag < 31:
            raise ValueError('Non-minimal tag encoding')

    if data_len < pointer + 1:
        raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer))
    length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
    pointer += 1
    trailer = b''

    if length_octet >> 7 == 0:
        # Short-form length: low 7 bits are the content length
        contents_end = pointer + (length_octet & 127)

    else:
        length_octets = length_octet & 127
        if length_octets:
            # Long-form length: low 7 bits give the number of length octets
            if data_len < pointer + length_octets:
                raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (length_octets, data_len - pointer))
            pointer += length_octets
            contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False)

        else:
            # To properly parse indefinite length values, we need to scan forward
            # parsing headers until we find a value with a length of zero. If we
            # just scanned looking for \x00\x00, nested indefinite length values
            # would not work.
            if not constructed:
                raise ValueError('Indefinite-length element must be constructed')
            contents_end = pointer
            while data_len < contents_end + 2 or encoded_data[contents_end:contents_end+2] != b'\x00\x00':
                _, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True, depth=depth+1)
            # Include the end-of-contents octets in the consumed span
            contents_end += 2
            trailer = b'\x00\x00'

    if contents_end > data_len:
        raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end - pointer, data_len - pointer))

    if lengths_only:
        return (pointer, contents_end)

    return (
        (
            first_octet >> 6,
            constructed,
            tag,
            encoded_data[start:pointer],
            encoded_data[pointer:contents_end-len(trailer)],
            trailer
        ),
        contents_end
    )
def _dump_header(class_, method, tag, contents):
    """
    Constructs the header bytes for an ASN.1 object

    :param class_:
        An integer ASN.1 class value: 0 (universal), 1 (application),
        2 (context), 3 (private)

    :param method:
        An integer ASN.1 method value: 0 (primitive), 1 (constructed)

    :param tag:
        An integer ASN.1 tag value

    :param contents:
        A byte string of the encoded byte contents

    :return:
        A byte string of the ASN.1 DER header
    """
    header = b''

    # Identifier octet: class in bits 7-6, method in bit 5, tag in bits 4-0
    id_num = 0
    id_num |= class_ << 6
    id_num |= method << 5

    if tag >= 31:
        # High-tag-number form: tag is encoded base-128, high bit set on all
        # but the final octet; octets are built last-to-first by prepending
        cont_bit = 0
        while tag > 0:
            header = chr_cls(cont_bit | (tag & 0x7f)) + header
            if not cont_bit:
                cont_bit = 0x80
            tag = tag >> 7
        # Leading identifier octet has all five tag bits set
        header = chr_cls(id_num | 31) + header
    else:
        header += chr_cls(id_num | tag)

    length = len(contents)
    if length <= 127:
        # Short-form length
        header += chr_cls(length)
    else:
        # Long-form length: first octet is 0x80 | number of length octets
        length_bytes = int_to_bytes(length)
        header += chr_cls(0x80 | len(length_bytes))
        header += length_bytes

    return header

View File

@ -0,0 +1,84 @@
# coding: utf-8
"""
ASN.1 type classes for PDF signature structures. Adds extra oid mapping and
value parsing to asn1crypto.x509.Extension() and asn1crypto.xms.CMSAttribute().
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .cms import CMSAttributeType, CMSAttribute
from .core import (
Boolean,
Integer,
Null,
ObjectIdentifier,
OctetString,
Sequence,
SequenceOf,
SetOf,
)
from .crl import CertificateList
from .ocsp import OCSPResponse
from .x509 import (
Extension,
ExtensionId,
GeneralName,
KeyPurposeId,
)
class AdobeArchiveRevInfo(Sequence):
    _fields = [
        ('version', Integer)
    ]


class AdobeTimestamp(Sequence):
    _fields = [
        ('version', Integer),
        ('location', GeneralName),
        ('requires_auth', Boolean, {'optional': True, 'default': False}),
    ]


class OtherRevInfo(Sequence):
    # Revocation info in a format identified by an arbitrary OID
    _fields = [
        ('type', ObjectIdentifier),
        ('value', OctetString),
    ]


class SequenceOfCertificateList(SequenceOf):
    # A sequence of CRLs
    _child_spec = CertificateList


class SequenceOfOCSPResponse(SequenceOf):
    _child_spec = OCSPResponse


class SequenceOfOtherRevInfo(SequenceOf):
    _child_spec = OtherRevInfo


class RevocationInfoArchival(Sequence):
    # Revocation material (CRLs, OCSP responses, other) embedded in a signature
    _fields = [
        ('crl', SequenceOfCertificateList, {'explicit': 0, 'optional': True}),
        ('ocsp', SequenceOfOCSPResponse, {'explicit': 1, 'optional': True}),
        ('other_rev_info', SequenceOfOtherRevInfo, {'explicit': 2, 'optional': True}),
    ]


class SetOfRevocationInfoArchival(SetOf):
    _child_spec = RevocationInfoArchival
# Register the Adobe-specific OIDs and value parsers with the generic
# x509.Extension / cms.CMSAttribute types so they decode transparently
ExtensionId._map['1.2.840.113583.1.1.9.2'] = 'adobe_archive_rev_info'
ExtensionId._map['1.2.840.113583.1.1.9.1'] = 'adobe_timestamp'
ExtensionId._map['1.2.840.113583.1.1.10'] = 'adobe_ppklite_credential'
Extension._oid_specs['adobe_archive_rev_info'] = AdobeArchiveRevInfo
Extension._oid_specs['adobe_timestamp'] = AdobeTimestamp
Extension._oid_specs['adobe_ppklite_credential'] = Null
KeyPurposeId._map['1.2.840.113583.1.1.5'] = 'pdf_signing'
CMSAttributeType._map['1.2.840.113583.1.1.8'] = 'adobe_revocation_info_archival'
CMSAttribute._oid_specs['adobe_revocation_info_archival'] = SetOfRevocationInfoArchival

View File

@ -0,0 +1,222 @@
# coding: utf-8
"""
Encoding DER to PEM and decoding PEM to DER. Exports the following items:
- armor()
- detect()
- unarmor()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import base64
import re
import sys
from ._errors import unwrap
from ._types import type_name as _type_name, str_cls, byte_cls
if sys.version_info < (3,):
from cStringIO import StringIO as BytesIO
else:
from io import BytesIO
def detect(byte_string):
    """
    Detect if a byte string seems to contain a PEM-encoded block

    :param byte_string:
        A byte string to look through

    :return:
        A boolean, indicating if a PEM-encoded block is contained in the byte
        string
    """
    if not isinstance(byte_string, byte_cls):
        raise TypeError(unwrap(
            '''
            byte_string must be a byte string, not %s
            ''',
            _type_name(byte_string)
        ))

    # Accept both the five-dash and the four-dash-plus-space BEGIN markers
    return b'-----BEGIN' in byte_string or b'---- BEGIN' in byte_string
def armor(type_name, der_bytes, headers=None):
    """
    Armors a DER-encoded byte string in PEM

    :param type_name:
        A unicode string that will be capitalized and placed in the header
        and footer of the block. E.g. "CERTIFICATE", "PRIVATE KEY", etc. This
        will appear as "-----BEGIN CERTIFICATE-----" and
        "-----END CERTIFICATE-----".

    :param der_bytes:
        A byte string to be armored

    :param headers:
        An OrderedDict of the header lines to write after the BEGIN line

    :raises:
        TypeError - when a parameter is of the wrong type

    :return:
        A byte string of the PEM block
    """
    if not isinstance(der_bytes, byte_cls):
        # Pass the value as a separate unwrap() parameter, consistent with the
        # other type checks in this module (previously % was applied inline)
        raise TypeError(unwrap(
            '''
            der_bytes must be a byte string, not %s
            ''',
            _type_name(der_bytes)
        ))

    if not isinstance(type_name, str_cls):
        raise TypeError(unwrap(
            '''
            type_name must be a unicode string, not %s
            ''',
            _type_name(type_name)
        ))

    type_name = type_name.upper().encode('ascii')

    output = BytesIO()
    output.write(b'-----BEGIN ')
    output.write(type_name)
    output.write(b'-----\n')
    if headers:
        for key in headers:
            output.write(key.encode('ascii'))
            output.write(b': ')
            output.write(headers[key].encode('ascii'))
            output.write(b'\n')
        # Blank line separates headers from the base64 body
        output.write(b'\n')
    b64_bytes = base64.b64encode(der_bytes)
    b64_len = len(b64_bytes)
    # Wrap the base64 payload at 64 characters per line
    i = 0
    while i < b64_len:
        output.write(b64_bytes[i:i + 64])
        output.write(b'\n')
        i += 64
    output.write(b'-----END ')
    output.write(type_name)
    output.write(b'-----\n')
    return output.getvalue()
def _unarmor(pem_bytes):
    """
    Convert a PEM-encoded byte string into one or more DER-encoded byte strings

    :param pem_bytes:
        A byte string of the PEM-encoded data

    :raises:
        ValueError - when the pem_bytes do not appear to be PEM-encoded bytes

    :return:
        A generator of 3-element tuples in the format: (object_type, headers,
        der_bytes). The object_type is a unicode string of what is between
        "-----BEGIN " and "-----". Examples include: "CERTIFICATE",
        "PUBLIC KEY", "PRIVATE KEY". The headers is a dict containing any lines
        in the form "Name: Value" that are right after the begin line.
    """
    if not isinstance(pem_bytes, byte_cls):
        raise TypeError(unwrap(
            '''
            pem_bytes must be a byte string, not %s
            ''',
            _type_name(pem_bytes)
        ))

    # Simple line-by-line state machine.
    # Valid states include: "trash", "headers", "body"
    state = 'trash'
    headers = {}
    base64_data = b''
    object_type = None

    found_start = False
    found_end = False

    for line in pem_bytes.splitlines(False):
        if line == b'':
            continue

        if state == "trash":
            # Look for a starting line since some CA cert bundles show the cert
            # info in a parsed format above each PEM block
            type_name_match = re.match(b'^(?:---- |-----)BEGIN ([A-Z0-9 ]+)(?: ----|-----)', line)
            if not type_name_match:
                continue
            object_type = type_name_match.group(1).decode('ascii')

            found_start = True
            state = 'headers'
            continue

        if state == 'headers':
            # The first line without a colon ends the header section and is
            # already part of the base64 body, so fall through to 'body'
            if line.find(b':') == -1:
                state = 'body'
            else:
                decoded_line = line.decode('ascii')
                name, value = decoded_line.split(':', 1)
                headers[name] = value.strip()
                continue

        if state == 'body':
            # Any dash-prefixed line is the END marker; emit the block and
            # reset the state machine so multiple blocks can be yielded
            if line[0:5] in (b'-----', b'---- '):
                der_bytes = base64.b64decode(base64_data)

                yield (object_type, headers, der_bytes)

                state = 'trash'
                headers = {}
                base64_data = b''
                object_type = None

                found_end = True
                continue

            base64_data += line

    if not found_start or not found_end:
        raise ValueError(unwrap(
            '''
            pem_bytes does not appear to contain PEM-encoded data - no
            BEGIN/END combination found
            '''
        ))
def unarmor(pem_bytes, multiple=False):
    """
    Convert a PEM-encoded byte string into a DER-encoded byte string

    :param pem_bytes:
        A byte string of the PEM-encoded data

    :param multiple:
        If True, function will return a generator

    :raises:
        ValueError - when the pem_bytes do not appear to be PEM-encoded bytes

    :return:
        A 3-element tuple (object_name, headers, der_bytes). The object_name is
        a unicode string of what is between "-----BEGIN " and "-----". Examples
        include: "CERTIFICATE", "PUBLIC KEY", "PRIVATE KEY". The headers is a
        dict containing any lines in the form "Name: Value" that are right
        after the begin line.
    """
    blocks = _unarmor(pem_bytes)
    if multiple:
        return blocks
    # Single-block mode: materialize only the first tuple from the generator
    return next(blocks)

View File

@ -0,0 +1,193 @@
# coding: utf-8
"""
ASN.1 type classes for PKCS#12 files. Exports the following items:
- CertBag()
- CrlBag()
- Pfx()
- SafeBag()
- SecretBag()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .algos import DigestInfo
from .cms import ContentInfo, SignedData
from .core import (
Any,
BMPString,
Integer,
ObjectIdentifier,
OctetString,
ParsableOctetString,
Sequence,
SequenceOf,
SetOf,
)
from .keys import PrivateKeyInfo, EncryptedPrivateKeyInfo
from .x509 import Certificate, KeyPurposeId
# The structures in this file are taken from https://tools.ietf.org/html/rfc7292
class MacData(Sequence):
    # Integrity data over the PKCS#12 contents
    _fields = [
        ('mac', DigestInfo),
        ('mac_salt', OctetString),
        ('iterations', Integer, {'default': 1}),
    ]


class Version(Integer):
    _map = {
        3: 'v3'
    }


class AttributeType(ObjectIdentifier):
    _map = {
        # https://tools.ietf.org/html/rfc2985#page-18
        '1.2.840.113549.1.9.20': 'friendly_name',
        '1.2.840.113549.1.9.21': 'local_key_id',
        # https://support.microsoft.com/en-us/kb/287547
        '1.3.6.1.4.1.311.17.1': 'microsoft_local_machine_keyset',
        # https://github.com/frohoff/jdk8u-dev-jdk/blob/master/src/share/classes/sun/security/pkcs12/PKCS12KeyStore.java
        # this is a set of OIDs, representing key usage, the usual value is a SET of one element OID 2.5.29.37.0
        '2.16.840.1.113894.746875.1.1': 'trusted_key_usage',
    }


class SetOfAny(SetOf):
    _child_spec = Any


class SetOfBMPString(SetOf):
    _child_spec = BMPString


class SetOfOctetString(SetOf):
    _child_spec = OctetString


class SetOfKeyPurposeId(SetOf):
    _child_spec = KeyPurposeId
class Attribute(Sequence):
    _fields = [
        ('type', AttributeType),
        # Spec resolved at runtime via _spec_callbacks below
        ('values', None),
    ]

    # NOTE(review): 'microsoft_csp_name' has no corresponding entry in
    # AttributeType._map above — verify whether its OID should be registered
    _oid_specs = {
        'friendly_name': SetOfBMPString,
        'local_key_id': SetOfOctetString,
        'microsoft_csp_name': SetOfBMPString,
        'trusted_key_usage': SetOfKeyPurposeId,
    }

    def _values_spec(self):
        # Fall back to SetOfAny for attribute types without a known spec
        return self._oid_specs.get(self['type'].native, SetOfAny)

    _spec_callbacks = {
        'values': _values_spec
    }


class Attributes(SetOf):
    _child_spec = Attribute
class Pfx(Sequence):
    # Top-level PKCS#12 structure
    _fields = [
        ('version', Version),
        ('auth_safe', ContentInfo),
        ('mac_data', MacData, {'optional': True})
    ]

    # Cache for the lazily-parsed AuthenticatedSafe
    _authenticated_safe = None

    @property
    def authenticated_safe(self):
        if self._authenticated_safe is None:
            content = self['auth_safe']['content']
            # A signed Pfx wraps the payload in SignedData
            if isinstance(content, SignedData):
                content = content['content_info']['content']
            self._authenticated_safe = AuthenticatedSafe.load(content.native)
        return self._authenticated_safe


class AuthenticatedSafe(SequenceOf):
    _child_spec = ContentInfo
class BagId(ObjectIdentifier):
    _map = {
        '1.2.840.113549.1.12.10.1.1': 'key_bag',
        '1.2.840.113549.1.12.10.1.2': 'pkcs8_shrouded_key_bag',
        '1.2.840.113549.1.12.10.1.3': 'cert_bag',
        '1.2.840.113549.1.12.10.1.4': 'crl_bag',
        '1.2.840.113549.1.12.10.1.5': 'secret_bag',
        '1.2.840.113549.1.12.10.1.6': 'safe_contents',
    }


class CertId(ObjectIdentifier):
    _map = {
        '1.2.840.113549.1.9.22.1': 'x509',
        '1.2.840.113549.1.9.22.2': 'sdsi',
    }
class CertBag(Sequence):
    _fields = [
        ('cert_id', CertId),
        ('cert_value', ParsableOctetString, {'explicit': 0}),
    ]

    # cert_value is parsed according to the spec selected by cert_id
    _oid_pair = ('cert_id', 'cert_value')
    _oid_specs = {
        'x509': Certificate,
    }


class CrlBag(Sequence):
    _fields = [
        ('crl_id', ObjectIdentifier),
        ('crl_value', OctetString, {'explicit': 0}),
    ]


class SecretBag(Sequence):
    _fields = [
        ('secret_type_id', ObjectIdentifier),
        ('secret_value', OctetString, {'explicit': 0}),
    ]


class SafeContents(SequenceOf):
    # _child_spec is assigned after SafeBag is defined (mutual reference)
    pass
class SafeBag(Sequence):
    _fields = [
        ('bag_id', BagId),
        ('bag_value', Any, {'explicit': 0}),
        ('bag_attributes', Attributes, {'optional': True}),
    ]

    # bag_value is parsed according to the spec selected by bag_id
    _oid_pair = ('bag_id', 'bag_value')
    _oid_specs = {
        'key_bag': PrivateKeyInfo,
        'pkcs8_shrouded_key_bag': EncryptedPrivateKeyInfo,
        'cert_bag': CertBag,
        'crl_bag': CrlBag,
        'secret_bag': SecretBag,
        'safe_contents': SafeContents
    }


# Resolve the forward reference: SafeContents holds SafeBag children
SafeContents._child_spec = SafeBag

View File

@ -0,0 +1,310 @@
# coding: utf-8
"""
ASN.1 type classes for the time stamp protocol (TSP). Exports the following
items:
- TimeStampReq()
- TimeStampResp()
Also adds TimeStampedData() support to asn1crypto.cms.ContentInfo(),
TimeStampedData() and TSTInfo() support to
asn1crypto.cms.EncapsulatedContentInfo() and some oids and value parsers to
asn1crypto.cms.CMSAttribute().
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .algos import DigestAlgorithm
from .cms import (
CMSAttribute,
CMSAttributeType,
ContentInfo,
ContentType,
EncapsulatedContentInfo,
)
from .core import (
Any,
BitString,
Boolean,
Choice,
GeneralizedTime,
IA5String,
Integer,
ObjectIdentifier,
OctetString,
Sequence,
SequenceOf,
SetOf,
UTF8String,
)
from .crl import CertificateList
from .x509 import (
Attributes,
CertificatePolicies,
GeneralName,
GeneralNames,
)
# The structures in this file are based on https://tools.ietf.org/html/rfc3161,
# https://tools.ietf.org/html/rfc4998, https://tools.ietf.org/html/rfc5544,
# https://tools.ietf.org/html/rfc5035, https://tools.ietf.org/html/rfc2634
class Version(Integer):
    _map = {
        0: 'v0',
        1: 'v1',
        2: 'v2',
        3: 'v3',
        4: 'v4',
        5: 'v5',
    }


class MessageImprint(Sequence):
    # Hash of the data being timestamped
    _fields = [
        ('hash_algorithm', DigestAlgorithm),
        ('hashed_message', OctetString),
    ]


class Accuracy(Sequence):
    # Precision of the timestamp's gen_time
    _fields = [
        ('seconds', Integer, {'optional': True}),
        ('millis', Integer, {'implicit': 0, 'optional': True}),
        ('micros', Integer, {'implicit': 1, 'optional': True}),
    ]
class Extension(Sequence):
    _fields = [
        ('extn_id', ObjectIdentifier),
        ('critical', Boolean, {'default': False}),
        ('extn_value', OctetString),
    ]


class Extensions(SequenceOf):
    _child_spec = Extension


class TSTInfo(Sequence):
    # Content of a timestamp token issued by a TSA
    _fields = [
        ('version', Version),
        ('policy', ObjectIdentifier),
        ('message_imprint', MessageImprint),
        ('serial_number', Integer),
        ('gen_time', GeneralizedTime),
        ('accuracy', Accuracy, {'optional': True}),
        ('ordering', Boolean, {'default': False}),
        ('nonce', Integer, {'optional': True}),
        ('tsa', GeneralName, {'explicit': 0, 'optional': True}),
        ('extensions', Extensions, {'implicit': 1, 'optional': True}),
    ]
class TimeStampReq(Sequence):
_fields = [
('version', Version),
('message_imprint', MessageImprint),
('req_policy', ObjectIdentifier, {'optional': True}),
('nonce', Integer, {'optional': True}),
('cert_req', Boolean, {'default': False}),
('extensions', Extensions, {'implicit': 0, 'optional': True}),
]
class PKIStatus(Integer):
    """
    RFC 3161 PKIStatus - the outcome of a time-stamp request
    """
    _map = {
        0: 'granted',
        1: 'granted_with_mods',
        2: 'rejection',
        3: 'waiting',
        4: 'revocation_warning',
        5: 'revocation_notification',
    }
class PKIFreeText(SequenceOf):
    """
    A sequence of human-readable UTF-8 status strings
    """
    _child_spec = UTF8String
class PKIFailureInfo(BitString):
    """
    RFC 3161 PKIFailureInfo - named failure bits for a rejected request
    """
    _map = {
        0: 'bad_alg',
        2: 'bad_request',
        5: 'bad_data_format',
        14: 'time_not_available',
        15: 'unaccepted_policy',
        16: 'unaccepted_extensions',
        17: 'add_info_not_available',
        25: 'system_failure',
    }
class PKIStatusInfo(Sequence):
    """
    RFC 3161 PKIStatusInfo - status plus optional explanatory text/failure bits
    """
    _fields = [
        ('status', PKIStatus),
        ('status_string', PKIFreeText, {'optional': True}),
        ('fail_info', PKIFailureInfo, {'optional': True}),
    ]
class TimeStampResp(Sequence):
    """
    RFC 3161 TimeStampResp - the TSA's reply to a TimeStampReq
    """
    _fields = [
        ('status', PKIStatusInfo),
        # Per RFC 3161 section 2.4.2 the TimeStampToken is OPTIONAL and MUST
        # NOT be present when the status is "rejection" or "waiting"; without
        # {'optional': True} parsing such an error response would fail
        ('time_stamp_token', ContentInfo, {'optional': True}),
    ]
class MetaData(Sequence):
    """
    RFC 5544 MetaData - descriptive information about time-stamped content
    """
    _fields = [
        ('hash_protected', Boolean),
        ('file_name', UTF8String, {'optional': True}),
        ('media_type', IA5String, {'optional': True}),
        ('other_meta_data', Attributes, {'optional': True}),
    ]
class TimeStampAndCRL(Sequence):
    """
    RFC 5544 TimeStampAndCRL - a time-stamp token with an optional CRL
    """
    _fields = [
        ('time_stamp', EncapsulatedContentInfo),
        ('crl', CertificateList, {'optional': True}),
    ]
class TimeStampTokenEvidence(SequenceOf):
    """
    A sequence of TimeStampAndCRL objects (RFC 5544)
    """
    _child_spec = TimeStampAndCRL
class DigestAlgorithms(SequenceOf):
    """
    A sequence of DigestAlgorithm objects
    """
    _child_spec = DigestAlgorithm
class EncryptionInfo(Sequence):
    """
    RFC 4998 EncryptionInfo - an OID-identified, opaque encryption value
    """
    _fields = [
        ('encryption_info_type', ObjectIdentifier),
        ('encryption_info_value', Any),
    ]
class PartialHashtree(SequenceOf):
    """
    RFC 4998 PartialHashtree - a list of hash values (octet strings)
    """
    _child_spec = OctetString
class PartialHashtrees(SequenceOf):
    """
    A sequence of PartialHashtree objects
    """
    _child_spec = PartialHashtree
class ArchiveTimeStamp(Sequence):
    """
    RFC 4998 ArchiveTimeStamp - a time-stamp with an optional reduced hashtree
    """
    _fields = [
        ('digest_algorithm', DigestAlgorithm, {'implicit': 0, 'optional': True}),
        ('attributes', Attributes, {'implicit': 1, 'optional': True}),
        ('reduced_hashtree', PartialHashtrees, {'implicit': 2, 'optional': True}),
        ('time_stamp', ContentInfo),
    ]
class ArchiveTimeStampSequence(SequenceOf):
    """
    A sequence of ArchiveTimeStamp objects (RFC 4998)
    """
    _child_spec = ArchiveTimeStamp
class EvidenceRecord(Sequence):
    """
    RFC 4998 EvidenceRecord - long-term archive evidence built from a chain
    of archive time-stamps
    """
    _fields = [
        ('version', Version),
        ('digest_algorithms', DigestAlgorithms),
        ('crypto_infos', Attributes, {'implicit': 0, 'optional': True}),
        ('encryption_info', EncryptionInfo, {'implicit': 1, 'optional': True}),
        ('archive_time_stamp_sequence', ArchiveTimeStampSequence),
    ]
class OtherEvidence(Sequence):
    """
    An OID-identified, opaque evidence value (RFC 5544)
    """
    _fields = [
        ('oe_type', ObjectIdentifier),
        ('oe_value', Any),
    ]
class Evidence(Choice):
    """
    RFC 5544 Evidence - one of the supported temporal evidence forms
    """
    _alternatives = [
        ('tst_evidence', TimeStampTokenEvidence, {'implicit': 0}),
        ('ers_evidence', EvidenceRecord, {'implicit': 1}),
        ('other_evidence', OtherEvidence, {'implicit': 2}),
    ]
class TimeStampedData(Sequence):
    """
    RFC 5544 TimeStampedData - content (or a URI to it) bound to temporal
    evidence; registered below as the 'timestamped_data' content type
    """
    _fields = [
        ('version', Version),
        ('data_uri', IA5String, {'optional': True}),
        ('meta_data', MetaData, {'optional': True}),
        ('content', OctetString, {'optional': True}),
        ('temporal_evidence', Evidence),
    ]
class IssuerSerial(Sequence):
    """
    RFC 2634 IssuerSerial - identifies a certificate by issuer and serial number
    """
    _fields = [
        ('issuer', GeneralNames),
        ('serial_number', Integer),
    ]
class ESSCertID(Sequence):
    """
    RFC 2634 ESSCertID - identifies a certificate by hash (SHA-1 per that RFC)
    """
    _fields = [
        ('cert_hash', OctetString),
        ('issuer_serial', IssuerSerial, {'optional': True}),
    ]
class ESSCertIDs(SequenceOf):
    """
    A sequence of ESSCertID objects
    """
    _child_spec = ESSCertID
class SigningCertificate(Sequence):
    """
    RFC 2634 SigningCertificate - the signing-certificate CMS attribute value
    """
    _fields = [
        ('certs', ESSCertIDs),
        ('policies', CertificatePolicies, {'optional': True}),
    ]
class SetOfSigningCertificates(SetOf):
    """
    A set of SigningCertificate objects, as used in a CMSAttribute
    """
    _child_spec = SigningCertificate
class ESSCertIDv2(Sequence):
    """
    RFC 5035 ESSCertIDv2 - like ESSCertID, but with a configurable hash
    algorithm defaulting to SHA-256
    """
    _fields = [
        ('hash_algorithm', DigestAlgorithm, {'default': {'algorithm': 'sha256'}}),
        ('cert_hash', OctetString),
        ('issuer_serial', IssuerSerial, {'optional': True}),
    ]
class ESSCertIDv2s(SequenceOf):
    """
    A sequence of ESSCertIDv2 objects
    """
    _child_spec = ESSCertIDv2
class SigningCertificateV2(Sequence):
    """
    RFC 5035 SigningCertificateV2 - the signing-certificate-v2 CMS attribute value
    """
    _fields = [
        ('certs', ESSCertIDv2s),
        ('policies', CertificatePolicies, {'optional': True}),
    ]
class SetOfSigningCertificatesV2(SetOf):
    """
    A set of SigningCertificateV2 objects, as used in a CMSAttribute
    """
    _child_spec = SigningCertificateV2
# Wire the structures defined above into the generic CMS types so the content
# types and attributes defined here are recognized and parsed automatically
EncapsulatedContentInfo._oid_specs['tst_info'] = TSTInfo
EncapsulatedContentInfo._oid_specs['timestamped_data'] = TimeStampedData
ContentInfo._oid_specs['timestamped_data'] = TimeStampedData
ContentType._map['1.2.840.113549.1.9.16.1.4'] = 'tst_info'
ContentType._map['1.2.840.113549.1.9.16.1.31'] = 'timestamped_data'
CMSAttributeType._map['1.2.840.113549.1.9.16.2.12'] = 'signing_certificate'
CMSAttribute._oid_specs['signing_certificate'] = SetOfSigningCertificates
CMSAttributeType._map['1.2.840.113549.1.9.16.2.47'] = 'signing_certificate_v2'
CMSAttribute._oid_specs['signing_certificate_v2'] = SetOfSigningCertificatesV2

View File

@ -0,0 +1,878 @@
# coding: utf-8
"""
Miscellaneous data helpers, including functions for converting integers to and
from bytes and UTC timezone. Exports the following items:
- OrderedDict()
- int_from_bytes()
- int_to_bytes()
- timezone.utc
- utc_with_dst
- create_timezone()
- inet_ntop()
- inet_pton()
- uri_to_iri()
- iri_to_uri()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import math
import sys
from datetime import datetime, date, timedelta, tzinfo
from ._errors import unwrap
from ._iri import iri_to_uri, uri_to_iri # noqa
from ._ordereddict import OrderedDict # noqa
from ._types import type_name
if sys.platform == 'win32':
from ._inet import inet_ntop, inet_pton
else:
from socket import inet_ntop, inet_pton # noqa
# Python 2
if sys.version_info <= (3,):
    # Python 2 branch: emulate int<->bytes conversion and datetime.timezone,
    # neither of which exists on py2
    def int_to_bytes(value, signed=False, width=None):
        """
        Converts an integer to a byte string
        :param value:
            The integer to convert
        :param signed:
            If the byte string should be encoded using two's complement
        :param width:
            If None, the minimal possible size (but at least 1),
            otherwise an integer of the byte width for the return value
        :return:
            A byte string
        """
        if value == 0 and width == 0:
            return b''
        # Handle negatives in two's complement
        is_neg = False
        if signed and value < 0:
            is_neg = True
            bits = int(math.ceil(len('%x' % abs(value)) / 2.0) * 8)
            value = (value + (1 << bits)) % (1 << bits)
        hex_str = '%x' % value
        if len(hex_str) & 1:
            hex_str = '0' + hex_str
        # py2-only: str.decode('hex') does not exist on py3
        output = hex_str.decode('hex')
        if signed and not is_neg and ord(output[0:1]) & 0x80:
            # High bit set on a positive value - prepend a null byte so it
            # is not interpreted as negative
            output = b'\x00' + output
        if width is not None:
            if len(output) > width:
                raise OverflowError('int too big to convert')
            if is_neg:
                pad_char = b'\xFF'
            else:
                pad_char = b'\x00'
            output = (pad_char * (width - len(output))) + output
        elif is_neg and ord(output[0:1]) & 0x80 == 0:
            # Ensure the sign bit is set for a negative value
            output = b'\xFF' + output
        return output
    def int_from_bytes(value, signed=False):
        """
        Converts a byte string to an integer
        :param value:
            The byte string to convert
        :param signed:
            If the byte string should be interpreted using two's complement
        :return:
            An integer
        """
        if value == b'':
            return 0
        # py2-only: long and str.encode('hex') do not exist on py3
        num = long(value.encode("hex"), 16) # noqa
        if not signed:
            return num
        # Check for sign bit and handle two's complement
        if ord(value[0:1]) & 0x80:
            bit_len = len(value) * 8
            return num - (1 << bit_len)
        return num
    class timezone(tzinfo): # noqa
        """
        Implements datetime.timezone for py2.
        Only full minute offsets are supported.
        DST is not supported.
        """
        def __init__(self, offset, name=None):
            """
            :param offset:
                A timedelta with this timezone's offset from UTC
            :param name:
                Name of the timezone; if None, generate one.
            """
            if not timedelta(hours=-24) < offset < timedelta(hours=24):
                raise ValueError('Offset must be in [-23:59, 23:59]')
            if offset.seconds % 60 or offset.microseconds:
                raise ValueError('Offset must be full minutes')
            self._offset = offset
            if name is not None:
                self._name = name
            elif not offset:
                self._name = 'UTC'
            else:
                self._name = 'UTC' + _format_offset(offset)
        def __eq__(self, other):
            """
            Compare two timezones
            :param other:
                The other timezone to compare to
            :return:
                A boolean
            """
            if type(other) != timezone:
                return False
            return self._offset == other._offset
        def __getinitargs__(self):
            """
            Called by tzinfo.__reduce__ to support pickle and copy.
            :return:
                offset and name, to be used for __init__
            """
            return self._offset, self._name
        def tzname(self, dt):
            """
            :param dt:
                A datetime object; ignored.
            :return:
                Name of this timezone
            """
            return self._name
        def utcoffset(self, dt):
            """
            :param dt:
                A datetime object; ignored.
            :return:
                A timedelta object with the offset from UTC
            """
            return self._offset
        def dst(self, dt):
            """
            :param dt:
                A datetime object; ignored.
            :return:
                Zero timedelta
            """
            return timedelta(0)
    # Singleton mirroring py3's datetime.timezone.utc
    timezone.utc = timezone(timedelta(0))
# Python 3
else:
    from datetime import timezone # noqa
    def int_to_bytes(value, signed=False, width=None):
        """
        Converts an integer to a byte string
        :param value:
            The integer to convert
        :param signed:
            If the byte string should be encoded using two's complement
        :param width:
            If None, the minimal possible size (but at least 1),
            otherwise an integer of the byte width for the return value
        :return:
            A byte string
        """
        if width is None:
            if signed:
                if value < 0:
                    bits_required = abs(value + 1).bit_length()
                else:
                    bits_required = value.bit_length()
                if bits_required % 8 == 0:
                    # A full byte boundary needs one extra bit for the sign
                    bits_required += 1
            else:
                bits_required = value.bit_length()
            width = math.ceil(bits_required / 8) or 1
        return value.to_bytes(width, byteorder='big', signed=signed)
    def int_from_bytes(value, signed=False):
        """
        Converts a byte string to an integer
        :param value:
            The byte string to convert
        :param signed:
            If the byte string should be interpreted using two's complement
        :return:
            An integer
        """
        return int.from_bytes(value, 'big', signed=signed)
def _format_offset(off):
    """
    Render a UTC offset timedelta as "[+-]HH:MM", or "" when off is None

    :param off:
        A datetime.timedelta object, or None

    :return:
        A str such as "+05:30", "-01:30" or ""
    """
    if off is None:
        return ''
    total_minutes = off.days * 24 * 60 + off.seconds // 60
    if total_minutes < 0:
        sign = '-'
    else:
        sign = '+'
    hours, minutes = divmod(abs(total_minutes), 60)
    return '%s%02d:%02d' % (sign, hours, minutes)
class _UtcWithDst(tzinfo):
    """
    A UTC tzinfo whose dst() returns a real timedelta instead of None, which
    datetime.astimezone() requires
    """
    def utcoffset(self, dt):
        # Always UTC, so no offset
        return timedelta(0)
    def dst(self, dt):
        # Never in daylight saving time
        return timedelta(0)
    def tzname(self, dt):
        return 'UTC'
utc_with_dst = _UtcWithDst()
# Cache of timezone objects keyed by their timedelta offset
_timezone_cache = {}
def create_timezone(offset):
    """
    Returns a datetime.timezone object for the given offset, reusing a cached
    instance when one exists.

    :param offset:
        A datetime.timedelta object; It needs to be in full minutes and between -23:59 and +23:59.

    :return:
        A datetime.timezone object
    """
    if offset not in _timezone_cache:
        _timezone_cache[offset] = timezone(offset)
    return _timezone_cache[offset]
class extended_date(object):
    """
    A datetime.datetime-like object that represents the year 0. This is just
    to handle 0000-01-01 found in some certificates. Python's datetime does
    not support year 0.
    The proleptic gregorian calendar repeats itself every 400 years. Therefore,
    the simplest way to format is to substitute year 2000.
    """
    def __init__(self, year, month, day):
        """
        :param year:
            The integer 0
        :param month:
            An integer from 1 to 12
        :param day:
            An integer from 1 to 31
        """
        if year != 0:
            raise ValueError('year must be 0')
        # Year 2000 has the same month/day layout as year 0 (400-year cycle)
        self._y2k = date(2000, month, day)
    @property
    def year(self):
        """
        :return:
            The integer 0
        """
        return 0
    @property
    def month(self):
        """
        :return:
            An integer from 1 to 12
        """
        return self._y2k.month
    @property
    def day(self):
        """
        :return:
            An integer from 1 to 31
        """
        return self._y2k.day
    def strftime(self, format):
        """
        Formats the date using strftime()
        :param format:
            A strftime() format string
        :return:
            A str, the formatted date as a unicode string
            in Python 3 and a byte string in Python 2
        """
        # Format the date twice, once with year 2000, once with year 4000.
        # The only differences in the result will be in the millennium. Find them and replace by zeros.
        y2k = self._y2k.strftime(format)
        y4k = self._y2k.replace(year=4000).strftime(format)
        return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))
    def isoformat(self):
        """
        Formats the date as %Y-%m-%d
        :return:
            The date formatted to %Y-%m-%d as a unicode string in Python 3
            and a byte string in Python 2
        """
        return self.strftime('0000-%m-%d')
    def replace(self, year=None, month=None, day=None):
        """
        Returns a new datetime.date or asn1crypto.util.extended_date
        object with the specified components replaced
        :return:
            A datetime.date or asn1crypto.util.extended_date object
        """
        if year is None:
            year = self.year
        if month is None:
            month = self.month
        if day is None:
            day = self.day
        # Any non-zero year can be represented by a real datetime.date
        if year > 0:
            cls = date
        else:
            cls = extended_date
        return cls(
            year,
            month,
            day
        )
    def __str__(self):
        """
        :return:
            A str representing this extended_date, e.g. "0000-01-01"
        """
        return self.strftime('%Y-%m-%d')
    def __eq__(self, other):
        """
        Compare two extended_date objects
        :param other:
            The other extended_date to compare to
        :return:
            A boolean
        """
        # datetime.date object wouldn't compare equal because it can't be year 0
        if not isinstance(other, self.__class__):
            return False
        return self.__cmp__(other) == 0
    def __ne__(self, other):
        """
        Compare two extended_date objects
        :param other:
            The other extended_date to compare to
        :return:
            A boolean
        """
        return not self.__eq__(other)
    def _comparison_error(self, other):
        # Raises a descriptive TypeError for unsupported comparison operands
        raise TypeError(unwrap(
            '''
            An asn1crypto.util.extended_date object can only be compared to
            an asn1crypto.util.extended_date or datetime.date object, not %s
            ''',
            type_name(other)
        ))
    def __cmp__(self, other):
        """
        Compare two extended_date or datetime.date objects
        :param other:
            The other extended_date object to compare to
        :return:
            An integer smaller than, equal to, or larger than 0
        """
        # self is year 0, other is >= year 1
        if isinstance(other, date):
            return -1
        if not isinstance(other, self.__class__):
            self._comparison_error(other)
        if self._y2k < other._y2k:
            return -1
        if self._y2k > other._y2k:
            return 1
        return 0
    def __lt__(self, other):
        return self.__cmp__(other) < 0
    def __le__(self, other):
        return self.__cmp__(other) <= 0
    def __gt__(self, other):
        return self.__cmp__(other) > 0
    def __ge__(self, other):
        return self.__cmp__(other) >= 0
class extended_datetime(object):
    """
    A datetime.datetime-like object that represents the year 0. This is just
    to handle 0000-01-01 found in some certificates. Python's datetime does
    not support year 0.
    The proleptic gregorian calendar repeats itself every 400 years. Therefore,
    the simplest way to format is to substitute year 2000.
    """
    # There are 97 leap days during 400 years.
    DAYS_IN_400_YEARS = 400 * 365 + 97
    DAYS_IN_2000_YEARS = 5 * DAYS_IN_400_YEARS
    def __init__(self, year, *args, **kwargs):
        """
        :param year:
            The integer 0
        :param args:
            Other positional arguments; see datetime.datetime.
        :param kwargs:
            Other keyword arguments; see datetime.datetime.
        """
        if year != 0:
            raise ValueError('year must be 0')
        # Year 2000 has the same calendar layout as year 0 (400-year cycle)
        self._y2k = datetime(2000, *args, **kwargs)
    @property
    def year(self):
        """
        :return:
            The integer 0
        """
        return 0
    @property
    def month(self):
        """
        :return:
            An integer from 1 to 12
        """
        return self._y2k.month
    @property
    def day(self):
        """
        :return:
            An integer from 1 to 31
        """
        return self._y2k.day
    @property
    def hour(self):
        """
        :return:
            An integer from 0 to 23
        """
        return self._y2k.hour
    @property
    def minute(self):
        """
        :return:
            An integer from 0 to 59
        """
        return self._y2k.minute
    @property
    def second(self):
        """
        :return:
            An integer from 0 to 59
        """
        return self._y2k.second
    @property
    def microsecond(self):
        """
        :return:
            An integer from 0 to 999999
        """
        return self._y2k.microsecond
    @property
    def tzinfo(self):
        """
        :return:
            If object is timezone aware, a datetime.tzinfo object, else None.
        """
        return self._y2k.tzinfo
    def utcoffset(self):
        """
        :return:
            If object is timezone aware, a datetime.timedelta object, else None.
        """
        return self._y2k.utcoffset()
    def time(self):
        """
        :return:
            A datetime.time object
        """
        return self._y2k.time()
    def date(self):
        """
        :return:
            An asn1crypto.util.extended_date of the date
        """
        return extended_date(0, self.month, self.day)
    def strftime(self, format):
        """
        Performs strftime(), always returning a str
        :param format:
            A strftime() format string
        :return:
            A str of the formatted datetime
        """
        # Format the datetime twice, once with year 2000, once with year 4000.
        # The only differences in the result will be in the millennium. Find them and replace by zeros.
        y2k = self._y2k.strftime(format)
        y4k = self._y2k.replace(year=4000).strftime(format)
        return ''.join('0' if (c2, c4) == ('2', '4') else c2 for c2, c4 in zip(y2k, y4k))
    def isoformat(self, sep='T'):
        """
        Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the
        date and time portions
        :param sep:
            A single character of the separator to place between the date and
            time
        :return:
            The formatted datetime as a unicode string in Python 3 and a byte
            string in Python 2
        """
        s = '0000-%02d-%02d%c%02d:%02d:%02d' % (self.month, self.day, sep, self.hour, self.minute, self.second)
        if self.microsecond:
            s += '.%06d' % self.microsecond
        return s + _format_offset(self.utcoffset())
    def replace(self, year=None, *args, **kwargs):
        """
        Returns a new datetime.datetime or asn1crypto.util.extended_datetime
        object with the specified components replaced
        :param year:
            The new year to substitute. None to keep it.
        :param args:
            Other positional arguments; see datetime.datetime.replace.
        :param kwargs:
            Other keyword arguments; see datetime.datetime.replace.
        :return:
            A datetime.datetime or asn1crypto.util.extended_datetime object
        """
        if year:
            # A non-zero year produces a plain datetime.datetime
            return self._y2k.replace(year, *args, **kwargs)
        return extended_datetime.from_y2k(self._y2k.replace(2000, *args, **kwargs))
    def astimezone(self, tz):
        """
        Convert this extended_datetime to another timezone.
        :param tz:
            A datetime.tzinfo object.
        :return:
            A new extended_datetime or datetime.datetime object
        """
        return extended_datetime.from_y2k(self._y2k.astimezone(tz))
    def timestamp(self):
        """
        Return POSIX timestamp. Only supported in python >= 3.3
        :return:
            A float representing the seconds since 1970-01-01 UTC. This will be a negative value.
        """
        # Shift the year-2000 timestamp back by exactly 2000 years worth of days
        return self._y2k.timestamp() - self.DAYS_IN_2000_YEARS * 86400
    def __str__(self):
        """
        :return:
            A str representing this extended_datetime, e.g. "0000-01-01 00:00:00.000001-10:00"
        """
        return self.isoformat(sep=' ')
    def __eq__(self, other):
        """
        Compare two extended_datetime objects
        :param other:
            The other extended_datetime to compare to
        :return:
            A boolean
        """
        # Only compare against other datetime or extended_datetime objects
        if not isinstance(other, (self.__class__, datetime)):
            return False
        # Offset-naive and offset-aware datetimes are never the same
        if (self.tzinfo is None) != (other.tzinfo is None):
            return False
        return self.__cmp__(other) == 0
    def __ne__(self, other):
        """
        Compare two extended_datetime objects
        :param other:
            The other extended_datetime to compare to
        :return:
            A boolean
        """
        return not self.__eq__(other)
    def _comparison_error(self, other):
        """
        Raises a TypeError about the other object not being suitable for
        comparison
        :param other:
            The object being compared to
        """
        raise TypeError(unwrap(
            '''
            An asn1crypto.util.extended_datetime object can only be compared to
            an asn1crypto.util.extended_datetime or datetime.datetime object,
            not %s
            ''',
            type_name(other)
        ))
    def __cmp__(self, other):
        """
        Compare two extended_datetime or datetime.datetime objects
        :param other:
            The other extended_datetime or datetime.datetime object to compare to
        :return:
            An integer smaller than, equal to, or larger than 0
        """
        if not isinstance(other, (self.__class__, datetime)):
            self._comparison_error(other)
        if (self.tzinfo is None) != (other.tzinfo is None):
            raise TypeError("can't compare offset-naive and offset-aware datetimes")
        # __sub__ handles the year-0 offset; compare the difference to zero
        diff = self - other
        zero = timedelta(0)
        if diff < zero:
            return -1
        if diff > zero:
            return 1
        return 0
    def __lt__(self, other):
        return self.__cmp__(other) < 0
    def __le__(self, other):
        return self.__cmp__(other) <= 0
    def __gt__(self, other):
        return self.__cmp__(other) > 0
    def __ge__(self, other):
        return self.__cmp__(other) >= 0
    def __add__(self, other):
        """
        Adds a timedelta
        :param other:
            A datetime.timedelta object to add.
        :return:
            A new extended_datetime or datetime.datetime object.
        """
        return extended_datetime.from_y2k(self._y2k + other)
    def __sub__(self, other):
        """
        Subtracts a timedelta or another datetime.
        :param other:
            A datetime.timedelta or datetime.datetime or extended_datetime object to subtract.
        :return:
            If a timedelta is passed, a new extended_datetime or datetime.datetime object.
            Else a datetime.timedelta object.
        """
        if isinstance(other, timedelta):
            return extended_datetime.from_y2k(self._y2k - other)
        if isinstance(other, extended_datetime):
            # Both operands carry the same 2000-year shift, so it cancels out
            return self._y2k - other._y2k
        if isinstance(other, datetime):
            # Only self is shifted by 2000 years; remove that many days
            return self._y2k - other - timedelta(days=self.DAYS_IN_2000_YEARS)
        return NotImplemented
    def __rsub__(self, other):
        return -(self - other)
    @classmethod
    def from_y2k(cls, value):
        """
        Revert substitution of year 2000.
        :param value:
            A datetime.datetime object which is 2000 years in the future.
        :return:
            A new extended_datetime or datetime.datetime object.
        """
        year = value.year - 2000
        # A positive year fits in a real datetime; only year 0 needs this class
        if year > 0:
            new_cls = datetime
        else:
            new_cls = cls
        return new_cls(
            year,
            value.month,
            value.day,
            value.hour,
            value.minute,
            value.second,
            value.microsecond,
            value.tzinfo
        )

View File

@ -0,0 +1,6 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
# Package version, exposed both as a string and as a tuple of ints
__version__ = '1.4.0'
__version_info__ = (1, 4, 0)

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,458 @@
# changelog
## 1.4.0
- `core.ObjectIdentifier` and all derived classes now obey X.660 §7.6 and
thus restrict the first arc to 0 to 2, and the second arc to less than
40 if the first arc is 0 or 1. This also fixes parsing of OIDs where the
first arc is 2 and the second arc is greater than 39.
- Fixed `keys.PublicKeyInfo.bit_size` to return an int rather than a float
on Python 3 when working with elliptic curve keys
- Fixed the `asn1crypto-tests` sdist on PyPi to work properly to generate a
.whl
## 1.3.0
- Added `encrypt_key_pref` (`1.2.840.113549.1.9.16.2.11`) to
`cms.CMSAttributeType()`, along with related structures
- Added Brainpool curves from RFC 5639 to `keys.NamedCurve()`
- Fixed `x509.Certificate().subject_directory_attributes_value`
- Fixed some incorrectly computed minimum elliptic curve primary key
encoding sizes in `keys.NamedCurve()`
- Fixed a `TypeError` when trying to call `.untag()` or `.copy()` on a
`core.UTCTime()` or `core.GeneralizedTime()`, or a value containing one,
when using Python 2
## 1.2.0
- Added `asn1crypto.load_order()`, which returns a `list` of unicode strings
of the names of the fully-qualified module names for all of submodules of
the package. The module names are listed in their dependency load order.
This is primarily intended for the sake of implementing hot reloading.
## 1.1.0
- Added User ID (`0.9.2342.19200300.100.1.1`) to `x509.NameType()`
- Added various EC named curves to `keys.NamedCurve()`
## 1.0.1
- Fix an absolute import in `keys` to a relative import
## 1.0.0
- Backwards Compatibility Breaks
- `cms.KeyEncryptionAlgorithmId().native` now returns the value
`"rsaes_pkcs1v15"` for OID `1.2.840.113549.1.1.1` instead of `"rsa"`
- Removed functionality to calculate public key values from private key
values. Alternatives have been added to oscrypto.
- `keys.PrivateKeyInfo().unwrap()` is now
`oscrypto.asymmetric.PrivateKey().unwrap()`
- `keys.PrivateKeyInfo().public_key` is now
`oscrypto.asymmetric.PrivateKey().public_key.unwrap()`
- `keys.PrivateKeyInfo().public_key_info` is now
`oscrypto.asymmetric.PrivateKey().public_key.asn1`
- `keys.PrivateKeyInfo().fingerprint` is now
`oscrypto.asymmetric.PrivateKey().fingerprint`
- `keys.PublicKeyInfo().unwrap()` is now
`oscrypto.asymmetric.PublicKey().unwrap()`
- `keys.PublicKeyInfo().fingerprint` is now
`oscrypto.asymmetric.PublicKey().fingerprint`
- Enhancements
- Significantly improved parsing of `core.UTCTime()` and
`core.GeneralizedTime()` values that include timezones and fractional
seconds
- `util.timezone` has a more complete implementation
- `core.Choice()` may now be constructed by a 2-element tuple or a 1-key
dict
- Added `x509.Certificate().not_valid_before` and
`x509.Certificate().not_valid_after`
- Added `core.BitString().unused_bits`
- Added `keys.NamedCurve.register()` for non-mainstream curve OIDs
- No longer try to load optional performance dependency, `libcrypto`,
on Mac or Linux
- `ocsp.CertStatus().native` will now return meaningful unicode string
values when the status choice is `"good"` or `"unknown"`. Previously
both returned `None` due to the way the structure was designed.
- Add support for explicit RSA SSA PSS (`1.2.840.113549.1.1.10`) to
`keys.PublicKeyInfo()` and `keys.PrivateKeyInfo()`
- Added structures for nested SHA-256 Windows PE signatures to
`cms.CMSAttribute()`
- Added RC4 (`1.2.840.113549.3.4`) to `algos.EncryptionAlgorithmId()`
- Added secp256k1 (`1.3.132.0.10`) to `keys.NamedCurve()`
- Added SHA-3 and SHAKE OIDs to `algos.DigestAlgorithmId()` and
`algos.HmacAlgorithmId()`
- Added RSA ES OAEP (`1.2.840.113549.1.1.7`) to
`cms.KeyEncryptionAlgorithmId()`
- Add IKE Intermediate (`1.3.6.1.5.5.8.2.2`) to `x509.KeyPurposeId()`
- `x509.EmailAddress()` and `x509.DNSName()` now handle invalidly-encoded
values using tags for `core.PrintableString()` and `core.UTF8String()`
 - Add parameter structure from RFC 5084 for AES-CCM to
`algos.EncryptionAlgorithm()`
- Improved robustness of parsing broken `core.Sequence()` and
`core.SequenceOf()` values
- Bug Fixes
- Fixed encoding of tag values over 30
- `core.IntegerBitString()` and `core.IntegerOctetString()` now restrict
values to non-negative integers since negative values are not
implemented
- When copying or dumping a BER-encoded indefinite-length value,
automatically force re-encoding to DER. *To ensure all nested values are
always DER-encoded, `.dump(True)` must be called.*
- Fix `UnboundLocalError` when calling `x509.IPAddress().native` on an
encoded value that has a length of zero
- Fixed passing `class_` via unicode string name to `core.Asn1Value()`
- Fixed a bug where EC private keys with leading null bytes would be
encoded in `keys.ECPrivateKey()` more narrowly than RFC 5915 requires
- Fixed some edge-case bugs in `util.int_to_bytes()`
- `x509.URI()` now only normalizes values when comparing
- Fixed BER-decoding of indefinite length `core.BitString()`
- Fixed DER-encoding of empty `core.BitString()`
- Fixed a missing return value for `core.Choice().parse()`
- Fixed `core.Choice().contents` working when the chosen alternative is a
`core.Choice()` also
- Fixed parsing and encoding of nested `core.Choice()` objects
- Fixed a bug causing `core.ObjectIdentifier().native` to sometimes not
map the OID
- Packaging
- `wheel`, `sdist` and `bdist_egg` releases now all include LICENSE,
`sdist` includes docs
- Added `asn1crypto_tests` package to PyPi
## 0.24.0
- `x509.Certificate().self_signed` will no longer return `"yes"` under any
circumstances. This helps prevent confusion since the library does not
verify the signature. Instead a library like oscrypto should be used
to confirm if a certificate is self-signed.
- Added various OIDs to `x509.KeyPurposeId()`
- Added `x509.Certificate().private_key_usage_period_value`
- Added structures for parsing common subject directory attributes for
X.509 certificates, including `x509.SubjectDirectoryAttribute()`
- Added `algos.AnyAlgorithmIdentifier()` for situations where an
algorithm identifier may contain a digest, signed digest or encryption
algorithm OID
- Fixed a bug with `x509.Certificate().subject_directory_attributes_value`
not returning the correct value
- Fixed a bug where explicitly-tagged fields in a `core.Sequence()` would
not function properly when the field had a default value
- Fixed a bug with type checking in `pem.armor()`
## 0.23.0
- Backwards compatibility break: the `tag_type`, `explicit_tag` and
`explicit_class` attributes on `core.Asn1Value` no longer exist and were
replaced by the `implicit` and `explicit` attributes. Field param dicts
may use the new `explicit` and `implicit` keys, or the old `tag_type` and
  `tag` keys. The attribute changes will likely have little to no impact
since they were primarily an implementation detail.
- Teletex strings used inside of X.509 certificates are now interpreted
using Windows-1252 (a superset of ISO-8859-1). This enables compatibility
with certificates generated by OpenSSL. Strict parsing of Teletex strings
can be retained by using the `x509.strict_teletex()` context manager.
- Added support for nested explicit tagging, supporting values that are
defined with explicit tagging and then added as a field of another
structure using explicit tagging.
- Fixed a `UnicodeDecodeError` when trying to find the (optional) dependency
OpenSSL on Python 2
- Fixed `next_update` field of `crl.TbsCertList` to be optional
- Added the `x509.Certificate.sha256_fingerprint` property
- `x509.Certificate.ocsp_urls` and `x509.DistributionPoint.url` will now
return `https://`, `ldap://` and `ldaps://` URLs in addition to `http://`.
- Added CMS Attribute Protection definitions from RFC 6211
- Added OIDs from RFC 6962
## 0.22.0
- Added `parser.peek()`
- Implemented proper support for BER-encoded indefinite length strings of
all kinds - `core.BitString`, `core.OctetString` and all of the `core`
classes that are natively represented as Python unicode strings
- Fixed a bug with encoding LDAP URLs in `x509.URI`
- Correct `x509.DNSName` to allow a leading `.`, such as when used with
`x509.NameConstraints`
- Fixed an issue with dumping the parsed contents of `core.Any` when
explicitly tagged
- Custom `setup.py clean` now accepts the short `-a` flag for compatibility
## 0.21.1
- Fixed a regression where explicit tagging of a field containing a
`core.Choice` would result in an incorrect header
- Fixed a bug where an `IndexError` was being raised instead of a `ValueError`
when a value was truncated to not include enough bytes for the header
- Corrected the spec for the `value` field of `pkcs12.Attribute`
- Added support for `2.16.840.1.113894.746875.1.1` OID to
`pkcs12.AttributeType`
## 0.21.0
- Added `core.load()` for loading standard, universal types without knowing
the spec beforehand
- Added a `strict` keyword arg to the various `load()` methods and functions in
`core` that checks for trailing data and raises a `ValueError` when found
- Added `asn1crypto.parser` submodule with `emit()` and `parse()` functions for
low-level integration
- Added `asn1crypto.version` for version introspection without side-effects
- Added `algos.DSASignature`
- Fixed a bug with the `_header` attribute of explicitly-tagged values only
containing the explicit tag header instead of both the explicit tag header
and the encapsulated value header
## 0.20.0
- Added support for year 0
- Added the OID for unique identifier to `x509.NameType`
- Fixed a bug creating the native representation of a `core.BitString` with
leading null bytes
- Added a `.cast()` method to allow converting between different
representations of the same data, e.g. `core.BitString` and
`core.OctetBitString`
## 0.19.0
- Force `algos.DigestAlgorithm` to encode `parameters` as `Null` when the
`algorithm` is `sha1`, `sha224`, `sha256`, `sha384` or `sha512` per RFC 4055
- Resolved an issue where a BER-encoded indefinite-length value could not be
properly parsed when embedded inside of a `core.Sequence` or `core.Set`
- Fix `x509.Name.build()` to properly handle dotted OID type values
- `core.Choice` can now be constructed from a single-element `dict` or a
two-element `tuple` to allow for better usability when constructing values
from native Python values
- All `core` objects can now be passed to `print()` without an exception being
raised
## 0.18.5
- Don't fail importing if `ctypes` or `_ctypes` is not available
## 0.18.4
- `core.Sequence` will now raise an exception when an unknown field is provided
- Prevent `UnicodeDecodeError` on Python 2 when calling
`core.OctetString.debug()`
- Corrected the default value for the `hash_algorithm` field of
`tsp.ESSCertIDv2`
- Fixed a bug constructing a `cms.SignedData` object
- Ensure that specific RSA OIDs are always paired with `parameters` set to
`core.Null`
## 0.18.3
- Fixed DER encoding of `core.BitString` when a `_map` is specified (i.e. a
"named bit list") to omit trailing zero bits. This fixes compliance of
various `x509` structures with RFC 5280.
- Corrected a side effect in `keys.PrivateKeyInfo.wrap()` that would cause the
original `keys.ECPrivateKey` structure to become corrupt
- `core.IntegerOctetString` now correctly encodes the integer as an unsigned
value when converting to bytes. Previously decoding was unsigned, but
encoding was signed.
- Fix `util.int_from_bytes()` on Python 2 to return `0` from an empty byte
string
## 0.18.2
- Allow `_perf` submodule to be removed from source tree when embedding
## 0.18.1
- Fixed DER encoding of `core.Set` and `core.SetOf`
- Fixed a bug in `x509.Name.build()` that could generate invalid DER encoding
- Improved exception messages when parsing nested structures via the `.native`
attribute
- `algos.SignedDigestAlgorithm` now ensures the `parameters` are set to
`Null` when `algorithm` is `sha224_rsa`, `sha256_rsa`, `sha384_rsa` or
`sha512_rsa`, per RFC 4055
- Corrected the definition of `pdf.AdobeTimestamp` to mark the
`requires_auth` field as optional
- Add support for the OID `1.2.840.113549.1.9.16.2.14` to
`cms.CMSAttributeType`
- Improve attribute support for `cms.AttributeCertificateV2`
- Handle `cms.AttributeCertificateV2` when incorrectly tagged as
`cms.AttributeCertificateV1` in `cms.CertificateChoices`
## 0.18.0
- Improved general parsing performance by 10-15%
- Add support for Windows XP
- Added `core.ObjectIdentifier.dotted` attribute to always return dotted
integer unicode string
- Added `core.ObjectIdentifier.map()` and `core.ObjectIdentifier.unmap()`
class methods to map dotted integer unicode strings to user-friendly unicode
strings and back
- Added various Apple OIDs to `x509.KeyPurposeId`
- Fixed a bug parsing nested indefinite-length-encoded values
- Fixed a bug with `x509.Certificate.issuer_alt_name_value` if it is the first
extension queried
- `keys.PublicKeyInfo.bit_size` and `keys.PrivateKeyInfo.bit_size` values are
now rounded up to the next closest multiple of 8
## 0.17.1
- Fix a bug in `x509.URI` parsing IRIs containing explicit port numbers on
Python 3.x
## 0.17.0
- Added `x509.TrustedCertificate` for handling OpenSSL auxiliary certificate
information appended after a certificate
- Added `core.Concat` class for situations such as `x509.TrustedCertificate`
- Allow "broken" X.509 certificates to use `core.IA5String` where an
`x509.DirectoryString` should be used instead
- Added `keys.PrivateKeyInfo.public_key_info` attribute
- Added a bunch of OIDs to `x509.KeyPurposeId`
## 0.16.0
- Added DH key exchange structures: `algos.KeyExchangeAlgorithm`,
`algos.KeyExchangeAlgorithmId` and `algos.DHParameters`.
- Added DH public key support to `keys.PublicKeyInfo`,
`keys.PublicKeyAlgorithm` and `keys.PublicKeyAlgorithmId`. New structures
include `keys.DomainParameters` and `keys.ValidationParms`.
## 0.15.1
- Fixed `cms.CMSAttributes` to be a `core.SetOf` instead of `core.SequenceOf`
- `cms.CMSAttribute` can now parse unknown attribute constructs without an
exception being raised
- `x509.PolicyMapping` now uses `x509.PolicyIdentifier` for field types
- Fixed `pdf.RevocationInfoArchival` so that all fields are now of the type
`core.SequenceOf` instead of a single value
- Added support for the `name_distinguisher`, `telephone_number` and
`organization_identifier` OIDs to `x509.Name`
- Fixed `x509.Name.native` to not accidentally create nested lists when three
or more values for a single type are part of the name
- `x509.Name.human_friendly` now reverses the order of fields when the data
in an `x509.Name` was encoded in most-specific to least-specific order, which
is the opposite of the standard way of least-specific to most-specific.
- `x509.NameType.human_friendly` no longer raises an exception when an
unknown OID is encountered
- Raise a `ValueError` when parsing a `core.Set` and an unknown field is
encountered
## 0.15.0
- Added support for the TLS feature extension from RFC 7633
- `x509.Name.build()` now accepts a keyword parameter `use_printable` to force
string encoding to be `core.PrintableString` instead of `core.UTF8String`
- Added the functions `util.uri_to_iri()` and `util.iri_to_uri()`
- Changed `algos.SignedDigestAlgorithmId` to use the preferred OIDs when
mapping a unicode string name to an OID. Previously there were multiple OIDs
for some algorithms, and different OIDs would sometimes be selected due to
the fact that the `_map` `dict` is not ordered.
## 0.14.1
- Fixed a bug generating `x509.Certificate.sha1_fingerprint` on Python 2
## 0.14.0
- Added the `x509.Certificate.sha1_fingerprint` attribute
## 0.13.0
- Backwards compatibility break: the native representation of some
`algos.EncryptionAlgorithmId` values changed. `aes128` became `aes128_cbc`,
`aes192` became `aes192_cbc` and `aes256` became `aes256_cbc`.
- Added more OIDs to `algos.EncryptionAlgorithmId`
- Added more OIDs to `cms.KeyEncryptionAlgorithmId`
- `x509.Name.human_friendly` now properly supports multiple values per
`x509.NameTypeAndValue` object
- Added `ocsp.OCSPResponse.basic_ocsp_response` and
`ocsp.OCSPResponse.response_data` properties
- Added `algos.EncryptionAlgorithm.encryption_mode` property
- Fixed a bug with parsing times containing timezone offsets in Python 3
- The `attributes` field of `csr.CertificationRequestInfo` is now optional,
for compatibility with other ASN.1 parsers
## 0.12.2
- Correct `core.Sequence.__setitem__()` to set `core.VOID` on an optional
field when `None` is set
## 0.12.1
- Fixed a `unicode`/`bytes` bug with `x509.URI.dump()` on Python 2
## 0.12.0
- Backwards Compatibility Break: `core.NoValue` was renamed to `core.Void` and
a singleton was added as `core.VOID`
- 20-30% improvement in parsing performance
- `core.Void` now implements `__nonzero__`
- `core.Asn1Value.copy()` now performs a deep copy
- All `core` value classes are now compatible with the `copy` module
- `core.SequenceOf` and `core.SetOf` now implement `__contains__`
- Added `x509.Name.__len__()`
- Fixed a bug where `core.Choice.validate()` would not properly account for
explicit tagging
- `core.Choice.load()` now properly passes itself as the spec when parsing
- `x509.Certificate.crl_distribution_points` no longer throws an exception if
the `DistributionPoint` does not have a value for the `distribution_point`
field
## 0.11.1
- Corrected `core.UTCTime` to interpret year <= 49 as 20xx and >= 50 as 19xx
- `keys.PublicKeyInfo.hash_algo` can now handle DSA keys without parameters
- Added `crl.CertificateList.sha256` and `crl.CertificateList.sha1`
- Fixed `x509.Name.build()` to properly encode `country_name`, `serial_number`
and `dn_qualifier` as `core.PrintableString` as specified in RFC 5280,
instead of `core.UTF8String`
## 0.11.0
- Added Python 2.6 support
- Added ability to compare primitive type objects
- Implemented proper support for internationalized domains, URLs and email
addresses in `x509.Certificate`
- Comparing `x509.Name` and `x509.GeneralName` objects adheres to RFC 5280
- `x509.Certificate.self_signed` and `x509.Certificate.self_issued` no longer
require that certificate is for a CA
- Fixed `x509.Certificate.valid_domains` to adhere to RFC 6125
- Added `x509.Certificate.is_valid_domain_ip()`
- Added `x509.Certificate.sha1` and `x509.Certificate.sha256`
- Exposed `util.inet_ntop()` and `util.inet_pton()` for IP address encoding
- Improved exception messages for improper types to include type's module name
## 0.10.1
- Fixed bug in `core.Sequence` affecting Python 2.7 and pypy
## 0.10.0
- Added PEM encoding/decoding functionality
- `core.BitString` now uses item access instead of attributes for named bit
access
- `core.BitString.native` now uses a `set` of unicode strings when `_map` is
present
- Removed `core.Asn1Value.pprint()` method
- Added `core.ParsableOctetString` class
- Added `core.ParsableOctetBitString` class
- Added `core.Asn1Value.copy()` method
- Added `core.Asn1Value.debug()` method
- Added `core.SequenceOf.append()` method
- Added `core.Sequence.spec()` and `core.SequenceOf.spec()` methods
- Added correct IP address parsing to `x509.GeneralName`
- `x509.Name` and `x509.GeneralName` are now compared according to rules in
RFC 5280
- Added convenience attributes to:
- `algos.SignedDigestAlgorithm`
- `crl.CertificateList`
- `crl.RevokedCertificate`
- `keys.PublicKeyInfo`
- `ocsp.OCSPRequest`
- `ocsp.Request`
- `ocsp.OCSPResponse`
- `ocsp.SingleResponse`
- `x509.Certificate`
- `x509.Name`
- Added `asn1crypto.util` module with the following items:
- `int_to_bytes()`
- `int_from_bytes()`
- `timezone.utc`
- Added `setup.py clean` command
## 0.9.0
- Initial release

View File

@ -0,0 +1,28 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
# The name of the package these dev tasks build and test
package_name = "asn1crypto"

# Sibling modularcrypto packages whose tests may also be run alongside this one
other_packages = [
    "oscrypto",
    "certbuilder",
    "certvalidator",
    "crlbuilder",
    "csrbuilder",
    "ocspbuilder"
]

# Extra "name=value" keyword args accepted on the command line before the task
# name - none are defined for this package
task_keyword_args = []

# If oscrypto must be importable before tasks run
requires_oscrypto = False

# If a separate tests/ package (with its own setup.py) should also be built
has_tests_package = True

# Absolute path to the package source checkout (the parent of this dev/ dir)
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))

# Directory expected to contain checkouts of the other modularcrypto packages
build_root = os.path.abspath(os.path.join(package_root, '..'))

# NOTE(review): empty mappings - presumably consumed by doc-generation tasks;
# confirm against the other dev/ modules
md_source_map = {}
definition_replacements = {}

View File

@ -0,0 +1,116 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import imp
import sys
import os
from . import build_root, package_name, package_root
# Python 2 needs the unicode-returning variant; Python 3's os.getcwd() already
# returns text
getcwd = os.getcwdu if sys.version_info < (3,) else os.getcwd
def _import_from(mod, path, mod_dir=None, allow_error=False):
"""
Imports a module from a specific path
:param mod:
A unicode string of the module name
:param path:
A unicode string to the directory containing the module
:param mod_dir:
If the sub directory of "path" is different than the "mod" name,
pass the sub directory as a unicode string
:param allow_error:
If an ImportError should be raised when the module can't be imported
:return:
None if not loaded, otherwise the module
"""
if mod_dir is None:
mod_dir = mod.replace('.', os.sep)
if not os.path.exists(path):
return None
if not os.path.exists(os.path.join(path, mod_dir)) \
and not os.path.exists(os.path.join(path, mod_dir + '.py')):
return None
if os.sep in mod_dir:
append, mod_dir = mod_dir.rsplit(os.sep, 1)
path = os.path.join(path, append)
try:
mod_info = imp.find_module(mod_dir, [path])
return imp.load_module(mod, *mod_info)
except ImportError:
if allow_error:
raise
return None
def _preload(require_oscrypto, print_info):
    """
    Preloads asn1crypto and optionally oscrypto from a local source checkout,
    or from a normal install

    :param require_oscrypto:
        A bool if oscrypto needs to be preloaded

    :param print_info:
        A bool if info about asn1crypto and oscrypto should be printed
    """

    if print_info:
        print('Working dir: ' + getcwd())
        print('Python ' + sys.version.replace('\n', ''))

    asn1crypto = None
    oscrypto = None

    if require_oscrypto:
        # Some CI services don't use the package name for the dir
        if package_name == 'oscrypto':
            oscrypto_dir = package_root
        else:
            oscrypto_dir = os.path.join(build_root, 'oscrypto')
        oscrypto_tests = None
        # Prefer the tests from a local oscrypto source checkout
        if os.path.exists(oscrypto_dir):
            oscrypto_tests = _import_from('oscrypto_tests', oscrypto_dir, 'tests')
        # Fall back to an installed oscrypto_tests package
        if oscrypto_tests is None:
            import oscrypto_tests
        # Returns the asn1crypto and oscrypto modules as a 2-tuple
        asn1crypto, oscrypto = oscrypto_tests.local_oscrypto()

    else:
        if package_name == 'asn1crypto':
            asn1crypto_dir = package_root
        else:
            asn1crypto_dir = os.path.join(build_root, 'asn1crypto')
        # Use a source checkout when present, otherwise the installed copy
        if os.path.exists(asn1crypto_dir):
            asn1crypto = _import_from('asn1crypto', asn1crypto_dir)
        if asn1crypto is None:
            import asn1crypto

    if print_info:
        print(
            '\nasn1crypto: %s, %s' % (
                asn1crypto.__version__,
                os.path.dirname(asn1crypto.__file__)
            )
        )
        if require_oscrypto:
            print(
                'oscrypto: %s backend, %s, %s' % (
                    oscrypto.backend(),
                    oscrypto.__version__,
                    os.path.dirname(oscrypto.__file__)
                )
            )

View File

@ -0,0 +1,205 @@
# coding: utf-8
"""
This file was originally derived from
https://github.com/pypa/pip/blob/3e713708088aedb1cde32f3c94333d6e29aaf86e/src/pip/_internal/pep425tags.py
The following license covers that code:
Copyright (c) 2008-2018 The pip developers (see AUTHORS.txt file)
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import sys
import os
import ctypes
import re
import platform
# sysconfig only exists on Python 2.7+; older interpreters hit the fallback
# code paths that don't need it
if sys.version_info >= (2, 7):
    import sysconfig

# Text type that is unicode on both major Python versions
str_cls = unicode if sys.version_info < (3,) else str  # noqa
def _pep425_implementation():
"""
:return:
A 2 character unicode string of the implementation - 'cp' for cpython
or 'pp' for PyPy
"""
return 'pp' if hasattr(sys, 'pypy_version_info') else 'cp'
def _pep425_version():
"""
:return:
A tuple of integers representing the Python version number
"""
if hasattr(sys, 'pypy_version_info'):
return (sys.version_info[0], sys.pypy_version_info.major,
sys.pypy_version_info.minor)
else:
return (sys.version_info[0], sys.version_info[1])
def _pep425_supports_manylinux():
"""
:return:
A boolean indicating if the machine can use manylinux1 packages
"""
try:
import _manylinux
return bool(_manylinux.manylinux1_compatible)
except (ImportError, AttributeError):
pass
# Check for glibc 2.5
try:
proc = ctypes.CDLL(None)
gnu_get_libc_version = proc.gnu_get_libc_version
gnu_get_libc_version.restype = ctypes.c_char_p
ver = gnu_get_libc_version()
if not isinstance(ver, str_cls):
ver = ver.decode('ascii')
match = re.match(r'(\d+)\.(\d+)', ver)
return match and match.group(1) == '2' and int(match.group(2)) >= 5
except (AttributeError):
return False
def _pep425_get_abi():
"""
:return:
A unicode string of the system abi. Will be something like: "cp27m",
"cp33m", etc.
"""
try:
soabi = sysconfig.get_config_var('SOABI')
if soabi:
if soabi.startswith('cpython-'):
return 'cp%s' % soabi.split('-')[1]
return soabi.replace('.', '_').replace('-', '_')
except (IOError, NameError):
pass
impl = _pep425_implementation()
suffix = ''
if impl == 'cp':
suffix += 'm'
if sys.maxunicode == 0x10ffff and sys.version_info < (3, 3):
suffix += 'u'
return '%s%s%s' % (impl, ''.join(map(str_cls, _pep425_version())), suffix)
def _pep425tags():
    """
    :return:
        A list of 3-element tuples with unicode strings or None:
         [0] implementation tag - cp33, pp27, cp26, py2, py2.py3
         [1] abi tag - cp26m, None
         [2] arch tag - linux_x86_64, macosx_10_10_x86_64, etc
    """

    tags = []

    # Interpreter versions, most specific first: e.g. for 3.6 this yields
    # ["36", "35", "34", ..., "30"]
    versions = []
    version_info = _pep425_version()
    major = version_info[:-1]
    for minor in range(version_info[-1], -1, -1):
        versions.append(''.join(map(str, major + (minor,))))

    impl = _pep425_implementation()

    abis = []

    abi = _pep425_get_abi()
    if abi:
        abis.append(abi)

    # The stable ABI only applies to CPython 3
    abi3 = _pep425_implementation() == 'cp' and sys.version_info >= (3,)
    if abi3:
        abis.append('abi3')

    abis.append('none')

    if sys.platform == 'darwin':
        plat_ver = platform.mac_ver()
        ver_parts = plat_ver[0].split('.')
        minor = int(ver_parts[1])
        arch = plat_ver[2]
        if sys.maxsize == 2147483647:
            arch = 'i386'
        arches = []
        # Wheels built on an older macOS work on newer ones, so include every
        # version back to 10.6
        while minor > 5:
            arches.append('macosx_10_%s_%s' % (minor, arch))
            arches.append('macosx_10_%s_intel' % (minor,))
            arches.append('macosx_10_%s_universal' % (minor,))
            minor -= 1

    else:
        if sys.platform == 'win32':
            if 'amd64' in sys.version.lower():
                arches = ['win_amd64']
            else:
                arches = [sys.platform]
        elif hasattr(os, 'uname'):
            (plat, _, _, _, machine) = os.uname()
            plat = plat.lower().replace('/', '')
            # BUG FIX: str.replace() returns a new string, so the sanitized
            # machine name was previously computed and then discarded
            machine = machine.replace(' ', '_').replace('/', '_')
            if plat == 'linux' and sys.maxsize == 2147483647 and 'arm' not in machine:
                # 32-bit Python on a 64-bit kernel still needs 32-bit wheels
                machine = 'i686'
            arch = '%s_%s' % (plat, machine)
            if _pep425_supports_manylinux():
                arches = [arch.replace('linux', 'manylinux1'), arch]
            else:
                arches = [arch]

    # Current interpreter version against each ABI and arch
    for abi in abis:
        for arch in arches:
            tags.append(('%s%s' % (impl, versions[0]), abi, arch))

    # Older interpreter versions against the stable ABI
    if abi3:
        for version in versions[1:]:
            for arch in arches:
                tags.append(('%s%s' % (impl, version), 'abi3', arch))

    # Major-version-only tags with no ABI
    for arch in arches:
        tags.append(('py%s' % (versions[0][0]), 'none', arch))

    tags.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    tags.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # Pure-python tags, most specific first
    for i, version in enumerate(versions):
        tags.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            tags.append(('py%s' % (version[0]), 'none', 'any'))

    tags.append(('py2.py3', 'none', 'any'))

    return tags

View File

@ -0,0 +1,163 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import ast
import _ast
import os
import sys
from . import package_root, task_keyword_args
from ._import import _import_from
# Byte-string type on each major Python version (str is the byte type on 2)
byte_cls = str if sys.version_info < (3,) else bytes
def _list_tasks():
    """
    Fetches a list of all valid tasks that may be run, and the args they
    accept. Does not actually import the task module to prevent errors if a
    user does not have the dependencies installed for every task.

    :return:
        A list of 2-element tuples:

         0: a unicode string of the task name
         1: a list of dicts containing the parameter definitions
    """

    tasks = []

    dev_path = os.path.join(package_root, 'dev')
    for fname in sorted(os.listdir(dev_path)):
        # Skip hidden files, private helpers and anything that isn't Python
        hidden_or_private = fname.startswith('.') or fname.startswith('_')
        if hidden_or_private or not fname.endswith('.py'):
            continue

        task_name = fname[:-3]
        run_args = ()

        full_path = os.path.join(package_root, 'dev', fname)
        with open(full_path, 'rb') as f:
            full_code = f.read()
        if sys.version_info >= (3,):
            full_code = full_code.decode('utf-8')

        # Statically evaluate the module-level "run_args" literal instead of
        # importing the task module
        task_node = ast.parse(full_code, filename=full_path)
        for node in ast.iter_child_nodes(task_node):
            if not isinstance(node, _ast.Assign):
                continue
            targets = node.targets
            if len(targets) == 1 \
                    and isinstance(targets[0], _ast.Name) \
                    and targets[0].id == 'run_args':
                run_args = ast.literal_eval(node.value)
                break

        tasks.append((task_name, run_args))

    return tasks
def show_usage():
    """
    Prints to stderr the valid options for invoking tasks
    """

    usages = []
    for task_name, run_args in _list_tasks():
        parts = [task_name]
        for run_arg in run_args:
            arg_name = run_arg.get('name', '')
            # Required params are shown in braces, optional ones in brackets
            if run_arg.get('required', False):
                parts.append('{%s}' % arg_name)
            else:
                parts.append('[%s]' % arg_name)
        usages.append(' '.join(parts))

    message = 'Usage: run.py'
    for karg in task_keyword_args:
        message += ' [%s=%s]' % (karg['name'], karg['placeholder'])
    message += ' (%s)' % ' | '.join(usages)

    print(message, file=sys.stderr)
    sys.exit(1)
def _get_arg(num):
"""
:return:
A unicode string of the requested command line arg
"""
if len(sys.argv) < num + 1:
return None
arg = sys.argv[num]
if isinstance(arg, byte_cls):
arg = arg.decode('utf-8')
return arg
def run_task():
    """
    Parses the command line args, invoking the requested task

    Exits the process with status 1 (via show_usage()) when the command line
    is invalid, otherwise with 0 or 1 depending on the task's truthy/falsy
    return value.
    """

    arg_num = 1
    task = None
    args = []
    kwargs = {}

    # We look for the task name, processing any global task keyword args
    # by setting the appropriate env var
    while True:
        val = _get_arg(arg_num)
        if val is None:
            break

        next_arg = False
        for karg in task_keyword_args:
            if val.startswith(karg['name'] + '='):
                os.environ[karg['env_var']] = val[len(karg['name']) + 1:]
                next_arg = True
                break

        if next_arg:
            arg_num += 1
            continue

        task = val
        break

    if task is None:
        show_usage()

    # Load dev/<task>.py; allow_error=True lets genuine ImportErrors inside
    # the task module propagate, while a missing module returns None
    task_mod = _import_from('dev.%s' % task, package_root, allow_error=True)
    if task_mod is None:
        show_usage()

    # The task's declared parameter spec caps how many args may follow
    run_args = task_mod.__dict__.get('run_args', [])
    max_args = arg_num + 1 + len(run_args)

    if len(sys.argv) > max_args:
        show_usage()

    # Map remaining command line args onto the task's declared params
    for i, run_arg in enumerate(run_args):
        val = _get_arg(arg_num + 1 + i)
        if val is None:
            if run_arg.get('required', False):
                show_usage()
            break

        # Only digit strings are cast, so a bad value falls through as text
        if run_arg.get('cast') == 'int' and val.isdigit():
            val = int(val)

        kwarg = run_arg.get('kwarg')
        if kwarg:
            kwargs[kwarg] = val
        else:
            args.append(val)

    run = task_mod.__dict__.get('run')

    result = run(*args, **kwargs)
    sys.exit(int(not result))

View File

@ -0,0 +1,89 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import imp
import os
import tarfile
import zipfile
import setuptools.sandbox
from . import package_root, package_name, has_tests_package
def _list_zip(filename):
"""
Prints all of the files in a .zip file
"""
zf = zipfile.ZipFile(filename, 'r')
for name in zf.namelist():
print(' %s' % name)
def _list_tgz(filename):
"""
Prints all of the files in a .tar.gz file
"""
tf = tarfile.open(filename, 'r:gz')
for name in tf.getnames():
print(' %s' % name)
def run():
    """
    Creates a sdist .tar.gz and a bdist_wheel --universal .whl

    :return:
        A bool - if the packaging process was successful
    """

    setup = os.path.join(package_root, 'setup.py')
    tests_root = os.path.join(package_root, 'tests')
    tests_setup = os.path.join(tests_root, 'setup.py')

    # Trying to call setuptools.sandbox.run_setup(setup, ['--version'])
    # resulted in a segfault, so we do this instead
    module_info = imp.find_module('version', [os.path.join(package_root, package_name)])
    version_mod = imp.load_module('%s.version' % package_name, *module_info)

    pkg_name_info = (package_name, version_mod.__version__)
    print('Building %s-%s' % pkg_name_info)

    # Build and list the contents of the sdist and wheel for the main package
    sdist = '%s-%s.tar.gz' % pkg_name_info
    whl = '%s-%s-py2.py3-none-any.whl' % pkg_name_info
    setuptools.sandbox.run_setup(setup, ['-q', 'sdist'])
    print(' - created %s' % sdist)
    _list_tgz(os.path.join(package_root, 'dist', sdist))
    setuptools.sandbox.run_setup(setup, ['-q', 'bdist_wheel', '--universal'])
    print(' - created %s' % whl)
    _list_zip(os.path.join(package_root, 'dist', whl))
    setuptools.sandbox.run_setup(setup, ['-q', 'clean'])

    if has_tests_package:
        # Repeat the process for the separate tests package
        print('Building %s_tests-%s' % (package_name, version_mod.__version__))
        tests_sdist = '%s_tests-%s.tar.gz' % pkg_name_info
        tests_whl = '%s_tests-%s-py2.py3-none-any.whl' % pkg_name_info
        setuptools.sandbox.run_setup(tests_setup, ['-q', 'sdist'])
        print(' - created %s' % tests_sdist)
        _list_tgz(os.path.join(tests_root, 'dist', tests_sdist))
        setuptools.sandbox.run_setup(tests_setup, ['-q', 'bdist_wheel', '--universal'])
        print(' - created %s' % tests_whl)
        _list_zip(os.path.join(tests_root, 'dist', tests_whl))
        setuptools.sandbox.run_setup(tests_setup, ['-q', 'clean'])

        # Move the tests artifacts into the main dist/ dir and remove the
        # now-empty tests dist dir
        dist_dir = os.path.join(package_root, 'dist')
        tests_dist_dir = os.path.join(tests_root, 'dist')
        os.rename(
            os.path.join(tests_dist_dir, tests_sdist),
            os.path.join(dist_dir, tests_sdist)
        )
        os.rename(
            os.path.join(tests_dist_dir, tests_whl),
            os.path.join(dist_dir, tests_whl)
        )
        os.rmdir(tests_dist_dir)

    return True

View File

@ -0,0 +1,73 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
import platform
import sys
import subprocess
# Optional command line args accepted by this task; each value is forwarded to
# run() as the keyword arg named by 'kwarg'. NOTE(review): presumably consumed
# by the shared task-runner's run_args protocol - confirm against dev/_task.py
run_args = [
    {
        'name': 'cffi',
        'kwarg': 'cffi',
    },
    {
        'name': 'openssl',
        'kwarg': 'openssl',
    },
    {
        'name': 'winlegacy',
        'kwarg': 'winlegacy',
    },
]
def _write_env(env, key, value):
sys.stdout.write("%s: %s\n" % (key, value))
sys.stdout.flush()
if sys.version_info < (3,):
env[key.encode('utf-8')] = value.encode('utf-8')
else:
env[key] = value
def run(**_):
    """
    Runs CI, setting various env vars

    :return:
        A bool - if the CI ran successfully
    """

    child_env = os.environ.copy()
    options = set(sys.argv[2:])

    wrote_vars = False

    # Default to the ctypes backend unless the cffi option was requested
    if 'cffi' not in options:
        _write_env(child_env, 'OSCRYPTO_USE_CTYPES', 'true')
        wrote_vars = True

    # On macOS, point oscrypto at the system OpenSSL libraries; the library
    # file names changed in macOS 10.15
    if 'openssl' in options and sys.platform == 'darwin':
        mac_version_info = tuple(map(int, platform.mac_ver()[0].split('.')[:2]))
        if mac_version_info < (10, 15):
            lib_paths = '/usr/lib/libcrypto.dylib,/usr/lib/libssl.dylib'
        else:
            lib_paths = '/usr/lib/libcrypto.35.dylib,/usr/lib/libssl.35.dylib'
        _write_env(child_env, 'OSCRYPTO_USE_OPENSSL', lib_paths)
        wrote_vars = True

    if 'winlegacy' in options:
        _write_env(child_env, 'OSCRYPTO_USE_WINLEGACY', 'true')
        wrote_vars = True

    if wrote_vars:
        sys.stdout.write("\n")

    # Re-invoke the plain ci task with the configured environment
    proc = subprocess.Popen(
        [
            sys.executable,
            'run.py',
            'ci',
        ],
        env=child_env
    )
    proc.communicate()

    return proc.returncode == 0

View File

@ -0,0 +1,57 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
import site
import sys
from . import build_root, requires_oscrypto
from ._import import _preload
# Make a bundled dependencies checkout importable when present
deps_dir = os.path.join(build_root, 'modularcrypto-deps')
if os.path.exists(deps_dir):
    site.addsitedir(deps_dir)

# Skip the linter on Python 2.6 and 3.2 (presumably unsupported there)
if sys.version_info[0:2] not in [(2, 6), (3, 2)]:
    from .lint import run as run_lint
else:
    run_lint = None

# Run tests under coverage.py except on Python 3.2, where tests run bare
if sys.version_info[0:2] != (3, 2):
    from .coverage import run as run_coverage
    from .coverage import coverage
    run_tests = None
else:
    from .tests import run as run_tests
    run_coverage = None
def run():
    """
    Runs the linter and tests

    :return:
        A bool - if the linter and tests ran successfully
    """

    _preload(requires_oscrypto, True)

    # The linter is optional - module-level run_lint is None when unsupported
    lint_ok = True
    if run_lint:
        print('')
        lint_ok = run_lint()

    # Pick the coverage-instrumented runner when available
    if run_coverage:
        banner = '\nRunning tests (via coverage.py %s)' % coverage.__version__
        runner = run_coverage
    else:
        banner = '\nRunning tests'
        runner = run_tests
    print(banner)
    sys.stdout.flush()
    tests_ok = runner(ci=True)
    sys.stdout.flush()

    return lint_ok and tests_ok

View File

@ -0,0 +1,5 @@
{
"slug": "wbond/asn1crypto",
"token": "98876f5e-6517-4def-85ce-c6e508eee35a",
"disabled": true
}

View File

@ -0,0 +1,677 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import cgi
import codecs
import coverage
import imp
import json
import os
import unittest
import re
import sys
import tempfile
import time
import platform as _plat
import subprocess
from fnmatch import fnmatch
from . import package_name, package_root, other_packages
# Python 2/3 compatibility shims for the text type, URL helpers and a
# unicode-capable open()
if sys.version_info < (3,):
    str_cls = unicode  # noqa
    from urllib2 import URLError
    from urllib import urlencode
    from io import open
else:
    str_cls = str
    from urllib.error import URLError
    from urllib.parse import urlencode

# The private re._pattern_type was removed in 3.7 in favor of re.Pattern
if sys.version_info < (3, 7):
    Pattern = re._pattern_type
else:
    Pattern = re.Pattern
def run(ci=False):
    """
    Runs the tests while measuring coverage

    :param ci:
        If coverage is being run in a CI environment - this triggers trying to
        run the tests for the rest of modularcrypto and uploading coverage data

    :return:
        A bool - if the tests ran successfully
    """

    # Remove any stale report so a leftover file can't be submitted later
    xml_report_path = os.path.join(package_root, 'coverage.xml')
    if os.path.exists(xml_report_path):
        os.unlink(xml_report_path)

    cov = coverage.Coverage(include='%s/*.py' % package_name)
    cov.start()

    # Imported after cov.start() - NOTE(review): presumably so module-level
    # code executed by the import is itself measured; confirm before moving
    from .tests import run as run_tests
    result = run_tests(ci=ci)
    print()

    if ci:
        # Also run the test suites of any sibling modularcrypto checkouts
        suite = unittest.TestSuite()
        loader = unittest.TestLoader()
        for other_package in other_packages:
            for test_class in _load_package_tests(other_package):
                suite.addTest(loader.loadTestsFromTestCase(test_class))

        if suite.countTestCases() > 0:
            print('Running tests from other modularcrypto packages')
            sys.stdout.flush()
            runner_result = unittest.TextTestRunner(stream=sys.stdout, verbosity=1).run(suite)
            result = runner_result.wasSuccessful() and result
            print()
            sys.stdout.flush()

    cov.stop()
    cov.save()

    cov.report(show_missing=False)
    print()
    sys.stdout.flush()
    if ci:
        cov.xml_report()

    # Only upload coverage for clean CI runs that produced a report
    if ci and result and os.path.exists(xml_report_path):
        _codecov_submit()
        print()

    return result
def _load_package_tests(name):
"""
Load the test classes from another modularcrypto package
:param name:
A unicode string of the other package name
:return:
A list of unittest.TestCase classes of the tests for the package
"""
package_dir = os.path.join('..', name)
if not os.path.exists(package_dir):
return []
tests_module_info = imp.find_module('tests', [package_dir])
tests_module = imp.load_module('%s.tests' % name, *tests_module_info)
return tests_module.test_classes()
def _env_info():
"""
:return:
A two-element tuple of unicode strings. The first is the name of the
environment, the second the root of the repo. The environment name
will be one of: "ci-travis", "ci-circle", "ci-appveyor",
"ci-github-actions", "local"
"""
if os.getenv('CI') == 'true' and os.getenv('TRAVIS') == 'true':
return ('ci-travis', os.getenv('TRAVIS_BUILD_DIR'))
if os.getenv('CI') == 'True' and os.getenv('APPVEYOR') == 'True':
return ('ci-appveyor', os.getenv('APPVEYOR_BUILD_FOLDER'))
if os.getenv('CI') == 'true' and os.getenv('CIRCLECI') == 'true':
return ('ci-circle', os.getcwdu() if sys.version_info < (3,) else os.getcwd())
if os.getenv('GITHUB_ACTIONS') == 'true':
return ('ci-github-actions', os.getenv('GITHUB_WORKSPACE'))
return ('local', package_root)
def _codecov_submit():
env_name, root = _env_info()
try:
with open(os.path.join(root, 'dev/codecov.json'), 'rb') as f:
json_data = json.loads(f.read().decode('utf-8'))
except (OSError, ValueError, UnicodeDecodeError, KeyError):
print('error reading codecov.json')
return
if json_data.get('disabled'):
return
if env_name == 'ci-travis':
# http://docs.travis-ci.com/user/environment-variables/#Default-Environment-Variables
build_url = 'https://travis-ci.org/%s/jobs/%s' % (os.getenv('TRAVIS_REPO_SLUG'), os.getenv('TRAVIS_JOB_ID'))
query = {
'service': 'travis',
'branch': os.getenv('TRAVIS_BRANCH'),
'build': os.getenv('TRAVIS_JOB_NUMBER'),
'pr': os.getenv('TRAVIS_PULL_REQUEST'),
'job': os.getenv('TRAVIS_JOB_ID'),
'tag': os.getenv('TRAVIS_TAG'),
'slug': os.getenv('TRAVIS_REPO_SLUG'),
'commit': os.getenv('TRAVIS_COMMIT'),
'build_url': build_url,
}
elif env_name == 'ci-appveyor':
# http://www.appveyor.com/docs/environment-variables
build_url = 'https://ci.appveyor.com/project/%s/build/%s' % (
os.getenv('APPVEYOR_REPO_NAME'),
os.getenv('APPVEYOR_BUILD_VERSION')
)
query = {
'service': "appveyor",
'branch': os.getenv('APPVEYOR_REPO_BRANCH'),
'build': os.getenv('APPVEYOR_JOB_ID'),
'pr': os.getenv('APPVEYOR_PULL_REQUEST_NUMBER'),
'job': '/'.join((
os.getenv('APPVEYOR_ACCOUNT_NAME'),
os.getenv('APPVEYOR_PROJECT_SLUG'),
os.getenv('APPVEYOR_BUILD_VERSION')
)),
'tag': os.getenv('APPVEYOR_REPO_TAG_NAME'),
'slug': os.getenv('APPVEYOR_REPO_NAME'),
'commit': os.getenv('APPVEYOR_REPO_COMMIT'),
'build_url': build_url,
}
elif env_name == 'ci-circle':
# https://circleci.com/docs/environment-variables
query = {
'service': 'circleci',
'branch': os.getenv('CIRCLE_BRANCH'),
'build': os.getenv('CIRCLE_BUILD_NUM'),
'pr': os.getenv('CIRCLE_PR_NUMBER'),
'job': os.getenv('CIRCLE_BUILD_NUM') + "." + os.getenv('CIRCLE_NODE_INDEX'),
'tag': os.getenv('CIRCLE_TAG'),
'slug': os.getenv('CIRCLE_PROJECT_USERNAME') + "/" + os.getenv('CIRCLE_PROJECT_REPONAME'),
'commit': os.getenv('CIRCLE_SHA1'),
'build_url': os.getenv('CIRCLE_BUILD_URL'),
}
elif env_name == 'ci-github-actions':
branch = ''
tag = ''
ref = os.getenv('GITHUB_REF', '')
if ref.startswith('refs/tags/'):
tag = ref[10:]
elif ref.startswith('refs/heads/'):
branch = ref[11:]
impl = _plat.python_implementation()
major, minor = _plat.python_version_tuple()[0:2]
build_name = '%s %s %s.%s' % (_platform_name(), impl, major, minor)
query = {
'service': 'custom',
'token': json_data['token'],
'branch': branch,
'tag': tag,
'slug': os.getenv('GITHUB_REPOSITORY'),
'commit': os.getenv('GITHUB_SHA'),
'build_url': 'https://github.com/wbond/oscrypto/commit/%s/checks' % os.getenv('GITHUB_SHA'),
'name': 'GitHub Actions %s on %s' % (build_name, os.getenv('RUNNER_OS'))
}
else:
if not os.path.exists(os.path.join(root, '.git')):
print('git repository not found, not submitting coverage data')
return
git_status = _git_command(['status', '--porcelain'], root)
if git_status != '':
print('git repository has uncommitted changes, not submitting coverage data')
return
branch = _git_command(['rev-parse', '--abbrev-ref', 'HEAD'], root)
commit = _git_command(['rev-parse', '--verify', 'HEAD'], root)
tag = _git_command(['name-rev', '--tags', '--name-only', commit], root)
impl = _plat.python_implementation()
major, minor = _plat.python_version_tuple()[0:2]
build_name = '%s %s %s.%s' % (_platform_name(), impl, major, minor)
query = {
'branch': branch,
'commit': commit,
'slug': json_data['slug'],
'token': json_data['token'],
'build': build_name,
}
if tag != 'undefined':
query['tag'] = tag
payload = 'PLATFORM=%s\n' % _platform_name()
payload += 'PYTHON_VERSION=%s %s\n' % (_plat.python_version(), _plat.python_implementation())
if 'oscrypto' in sys.modules:
payload += 'OSCRYPTO_BACKEND=%s\n' % sys.modules['oscrypto'].backend()
payload += '<<<<<< ENV\n'
for path in _list_files(root):
payload += path + '\n'
payload += '<<<<<< network\n'
payload += '# path=coverage.xml\n'
with open(os.path.join(root, 'coverage.xml'), 'r', encoding='utf-8') as f:
payload += f.read() + '\n'
payload += '<<<<<< EOF\n'
url = 'https://codecov.io/upload/v4'
headers = {
'Accept': 'text/plain'
}
filtered_query = {}
for key in query:
value = query[key]
if value == '' or value is None:
continue
filtered_query[key] = value
print('Submitting coverage info to codecov.io')
info = _do_request(
'POST',
url,
headers,
query_params=filtered_query
)
encoding = info[1] or 'utf-8'
text = info[2].decode(encoding).strip()
parts = text.split()
upload_url = parts[1]
headers = {
'Content-Type': 'text/plain',
'x-amz-acl': 'public-read',
'x-amz-storage-class': 'REDUCED_REDUNDANCY'
}
print('Uploading coverage data to codecov.io S3 bucket')
_do_request(
'PUT',
upload_url,
headers,
data=payload.encode('utf-8')
)
def _git_command(params, cwd):
    """
    Executes a git command, returning the output

    :param params:
        A list of the parameters to pass to git

    :param cwd:
        The working directory to execute git in

    :raises:
        OSError - when git exits with a non-zero code; the .stdout attribute
        of the exception contains the combined output

    :return:
        A unicode string of the command output (stderr is redirected into
        stdout, so the two streams are combined)
    """

    proc = subprocess.Popen(
        ['git'] + params,
        stdout=subprocess.PIPE,
        # Fold git's diagnostics into stdout so callers see a single stream
        stderr=subprocess.STDOUT,
        cwd=cwd
    )
    stdout, _ = proc.communicate()
    code = proc.wait()
    if code != 0:
        e = OSError('git exit code was non-zero')
        e.stdout = stdout
        raise e
    return stdout.decode('utf-8').strip()
def _parse_env_var_file(data):
"""
Parses a basic VAR="value data" file contents into a dict
:param data:
A unicode string of the file data
:return:
A dict of parsed name/value data
"""
output = {}
for line in data.splitlines():
line = line.strip()
if not line or '=' not in line:
continue
parts = line.split('=')
if len(parts) != 2:
continue
name = parts[0]
value = parts[1]
if len(value) > 1:
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
output[name] = value
return output
def _platform_name():
    """
    Returns information about the current operating system and version

    :raises:
        ValueError - when a Linux os release file exists but contains no
        usable name/version fields

    :return:
        A unicode string containing the OS name and version
    """

    if sys.platform == 'darwin':
        version = _plat.mac_ver()[0]
        _plat_ver_info = tuple(map(int, version.split('.')))
        # The product was renamed from "OS X" to "macOS" with 10.12 (Sierra)
        if _plat_ver_info < (10, 12):
            name = 'OS X'
        else:
            name = 'macOS'
        return '%s %s' % (name, version)

    elif sys.platform == 'win32':
        # Removed an unused sys.getwindowsversion() tuple that was computed
        # here but never read
        return 'Windows %s' % _plat.win32_ver()[0]

    elif sys.platform in ['linux', 'linux2']:
        # Prefer the systemd os-release file, fall back to lsb-release,
        # then a generic label
        if os.path.exists('/etc/os-release'):
            with open('/etc/os-release', 'r', encoding='utf-8') as f:
                pairs = _parse_env_var_file(f.read())
            if 'NAME' in pairs and 'VERSION_ID' in pairs:
                # A dead "version = pairs['VERSION_ID']" line that followed
                # this return has been removed - it was unreachable
                return '%s %s' % (pairs['NAME'], pairs['VERSION_ID'])
            elif 'PRETTY_NAME' in pairs:
                return pairs['PRETTY_NAME']
            elif 'NAME' in pairs:
                return pairs['NAME']
            else:
                raise ValueError('No suitable version info found in /etc/os-release')
        elif os.path.exists('/etc/lsb-release'):
            with open('/etc/lsb-release', 'r', encoding='utf-8') as f:
                pairs = _parse_env_var_file(f.read())
            if 'DISTRIB_DESCRIPTION' in pairs:
                return pairs['DISTRIB_DESCRIPTION']
            else:
                raise ValueError('No suitable version info found in /etc/lsb-release')
        else:
            return 'Linux'

    else:
        return '%s %s' % (_plat.system(), _plat.release())
def _list_files(root):
    """
    Lists all of the files in a directory, taking into account any .gitignore
    file that is present

    :param root:
        A unicode filesystem path

    :return:
        A sorted list of unicode strings, containing paths of all files not
        ignored by .gitignore with root, using relative paths
    """

    dir_patterns, file_patterns = _gitignore(root)
    paths = []
    prefix = os.path.abspath(root) + os.sep
    for base, dirs, files in os.walk(root):
        # Prune ignored dirs in place so os.walk() does not descend into
        # them. Iterate over a copy: removing from the list being iterated
        # would skip the entry that follows each removal, letting some
        # ignored directories survive.
        for d in list(dirs):
            for dir_pattern in dir_patterns:
                if fnmatch(d, dir_pattern):
                    dirs.remove(d)
                    break
        for f in files:
            skip = False
            for file_pattern in file_patterns:
                if fnmatch(f, file_pattern):
                    skip = True
                    break
            if skip:
                continue
            full_path = os.path.join(base, f)
            # Record the path relative to root
            if full_path[:len(prefix)] == prefix:
                full_path = full_path[len(prefix):]
            paths.append(full_path)
    return sorted(paths)
def _gitignore(root):
"""
Parses a .gitignore file and returns patterns to match dirs and files.
Only basic gitignore patterns are supported. Pattern negation, ** wildcards
and anchored patterns are not currently implemented.
:param root:
A unicode string of the path to the git repository
:return:
A 2-element tuple:
- 0: a list of unicode strings to match against dirs
- 1: a list of unicode strings to match against dirs and files
"""
gitignore_path = os.path.join(root, '.gitignore')
dir_patterns = ['.git']
file_patterns = []
if not os.path.exists(gitignore_path):
return (dir_patterns, file_patterns)
with open(gitignore_path, 'r', encoding='utf-8') as f:
for line in f.readlines():
line = line.strip()
if not line:
continue
if line.startswith('#'):
continue
if '**' in line:
raise NotImplementedError('gitignore ** wildcards are not implemented')
if line.startswith('!'):
raise NotImplementedError('gitignore pattern negation is not implemented')
if line.startswith('/'):
raise NotImplementedError('gitignore anchored patterns are not implemented')
if line.startswith('\\#'):
line = '#' + line[2:]
if line.startswith('\\!'):
line = '!' + line[2:]
if line.endswith('/'):
dir_patterns.append(line[:-1])
else:
file_patterns.append(line)
return (dir_patterns, file_patterns)
def _do_request(method, url, headers, data=None, query_params=None, timeout=20):
    """
    Performs an HTTP request by shelling out to PowerShell (Windows) or
    curl (everywhere else), so that TLS 1.2 is available even on old
    Python builds

    :param method:
        A unicode string of 'POST' or 'PUT'

    :param url:
        A unicode string of the URL to request

    :param headers:
        A dict of unicode strings, where keys are header names and values are
        the header values.

    :param data:
        A dict of unicode strings (to be encoded as
        application/x-www-form-urlencoded), or a byte string of data.

    :param query_params:
        A dict of unicode keys and values to pass as query params

    :param timeout:
        An integer number of seconds to use as the timeout
        NOTE(review): currently unused - the connect timeouts below are
        hardcoded; confirm whether it should be wired through

    :raises:
        TypeError - when data is a unicode string instead of bytes
        URLError - when the request fails or the response is malformed

    :return:
        A 3-element tuple:
         - 0: A unicode string of the response content-type
         - 1: A unicode string of the response encoding, or None
         - 2: A byte string of the response body
    """

    if query_params:
        url += '?' + urlencode(query_params).replace('+', '%20')

    if isinstance(data, dict):
        data_bytes = {}
        for key in data:
            data_bytes[key.encode('utf-8')] = data[key].encode('utf-8')
        data = urlencode(data_bytes)
        headers['Content-Type'] = 'application/x-www-form-urlencoded'

    if isinstance(data, str_cls):
        raise TypeError('data must be a byte string')

    # Initialize before the try block so the finally clause can not hit a
    # NameError if tempfile.mkstemp() itself raises
    tempf_path = None
    try:
        # The request body is staged in a temp file so the external tool
        # can stream it
        tempfd, tempf_path = tempfile.mkstemp('-coverage')
        os.write(tempfd, data or b'')
        os.close(tempfd)

        if sys.platform == 'win32':
            powershell_exe = os.path.join('system32\\WindowsPowerShell\\v1.0\\powershell.exe')
            code = "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12;"
            code += "$wc = New-Object Net.WebClient;"
            for key in headers:
                code += "$wc.Headers.add('%s','%s');" % (key, headers[key])
            code += "$out = $wc.UploadFile('%s', '%s', '%s');" % (url, method, tempf_path)
            code += "[System.Text.Encoding]::GetEncoding('ISO-8859-1').GetString($wc.ResponseHeaders.ToByteArray())"
            # To properly obtain bytes, we use BitConverter to get hex dash
            # encoding (e.g. AE-09-3F) and they decode in python
            code += " + [System.BitConverter]::ToString($out);"
            stdout, stderr = _execute(
                [powershell_exe, '-Command', code],
                os.getcwd(),
                re.compile(r'Unable to connect to|TLS|Internal Server Error'),
                6
            )
            if stdout[-2:] == b'\r\n' and b'\r\n\r\n' in stdout:
                # An extra trailing crlf is added at the end by powershell
                stdout = stdout[0:-2]
            parts = stdout.split(b'\r\n\r\n', 1)
            if len(parts) == 2:
                # Decode the BitConverter hex-dash body back into raw bytes
                stdout = parts[0] + b'\r\n\r\n' + codecs.decode(parts[1].replace(b'-', b''), 'hex_codec')

        else:
            args = [
                'curl',
                '--http1.1',
                '--connect-timeout', '5',
                '--request',
                method,
                '--location',
                '--silent',
                '--show-error',
                '--include',
                # Prevent curl from asking for an HTTP "100 Continue" response
                '--header', 'Expect:'
            ]
            for key in headers:
                args.append('--header')
                args.append("%s: %s" % (key, headers[key]))
            args.append('--data-binary')
            args.append('@%s' % tempf_path)
            args.append(url)
            stdout, stderr = _execute(
                args,
                os.getcwd(),
                re.compile(r'Failed to connect to|TLS|SSLRead|outstanding|cleanly|timed out'),
                6
            )
    finally:
        if tempf_path and os.path.exists(tempf_path):
            os.remove(tempf_path)

    if len(stderr) > 0:
        raise URLError("Error %sing %s:\n%s" % (method, url, stderr))

    # Both tools emit headers + blank line + body, thanks to --include /
    # ResponseHeaders above
    parts = stdout.split(b'\r\n\r\n', 1)
    if len(parts) != 2:
        raise URLError("Error %sing %s, response data malformed:\n%s" % (method, url, stdout))
    header_block, body = parts

    content_type_header = None
    content_len_header = None
    for hline in header_block.decode('iso-8859-1').splitlines():
        hline_parts = hline.split(':', 1)
        if len(hline_parts) != 2:
            continue
        name, val = hline_parts
        name = name.strip().lower()
        val = val.strip()
        if name == 'content-type':
            content_type_header = val
        if name == 'content-length':
            content_len_header = val

    if content_type_header is None and content_len_header != '0':
        raise URLError("Error %sing %s, no content-type header:\n%s" % (method, url, stdout))

    if content_type_header is None:
        content_type = 'text/plain'
        encoding = 'utf-8'
    else:
        content_type, params = cgi.parse_header(content_type_header)
        encoding = params.get('charset')

    return (content_type, encoding, body)
def _execute(params, cwd, retry=None, retries=0, backoff=2):
"""
Executes a subprocess
:param params:
A list of the executable and arguments to pass to it
:param cwd:
The working directory to execute the command in
:param retry:
If this string is present in stderr, or regex pattern matches stderr, retry the operation
:param retries:
An integer number of times to retry
:return:
A 2-element tuple of (stdout, stderr)
"""
proc = subprocess.Popen(
params,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=cwd
)
stdout, stderr = proc.communicate()
code = proc.wait()
if code != 0:
if retry and retries > 0:
stderr_str = stderr.decode('utf-8')
if isinstance(retry, Pattern):
if retry.search(stderr_str) is not None:
time.sleep(backoff)
return _execute(params, cwd, retry, retries - 1, backoff * 2)
elif retry in stderr_str:
time.sleep(backoff)
return _execute(params, cwd, retry, retries - 1, backoff * 2)
e = OSError('subprocess exit code for "%s" was %d: %s' % (' '.join(params), code, stderr))
e.stdout = stdout
e.stderr = stderr
raise e
return (stdout, stderr)
# Allow submitting coverage data by running this module directly as a script
if __name__ == '__main__':
    _codecov_submit()

View File

@ -0,0 +1,712 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
import subprocess
import sys
import shutil
import re
import json
import tarfile
import zipfile
from . import package_root, build_root, other_packages
from ._pep425 import _pep425tags, _pep425_implementation
# Alias the text string type so the rest of the module can be agnostic to
# Python 2 (unicode) vs Python 3 (str)
if sys.version_info < (3,):
    str_cls = unicode # noqa
else:
    str_cls = str
def run():
    """
    Installs required development dependencies. Uses git to checkout other
    modularcrypto repos for more accurate coverage data.

    :return:
        True once all dependencies are staged
    """

    deps_dir = os.path.join(build_root, 'modularcrypto-deps')

    # Always start from a clean dependency directory
    if os.path.exists(deps_dir):
        shutil.rmtree(deps_dir, ignore_errors=True)
    os.mkdir(deps_dir)

    try:
        print("Staging ci dependencies")
        _stage_requirements(deps_dir, os.path.join(package_root, 'requires', 'ci'))

        print("Checking out modularcrypto packages for coverage")
        for pkg_name in other_packages:
            pkg_dir = os.path.join(build_root, pkg_name)
            if os.path.exists(pkg_dir):
                print("%s is already present" % pkg_name)
                continue
            pkg_url = 'https://github.com/wbond/%s.git' % pkg_name
            print("Cloning %s" % pkg_url)
            _execute(['git', 'clone', pkg_url], build_root)
        print()

    except Exception:
        # Leave nothing half-staged behind on failure
        if os.path.exists(deps_dir):
            shutil.rmtree(deps_dir, ignore_errors=True)
        raise

    return True
def _download(url, dest):
    """
    Downloads a URL to a directory

    :param url:
        The URL to download

    :param dest:
        The path to the directory to save the file in

    :return:
        The filesystem path to the saved file
    """

    print('Downloading %s' % url)
    filename = os.path.basename(url)
    dest_path = os.path.join(dest, filename)

    if sys.platform == 'win32':
        # PowerShell's WebClient gives us TLS 1.2 regardless of the Python
        # build being used
        powershell_exe = os.path.join('system32\\WindowsPowerShell\\v1.0\\powershell.exe')
        code = "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12;"
        code += "(New-Object Net.WebClient).DownloadFile('%s', '%s');" % (url, dest_path)
        _execute([powershell_exe, '-Command', code], dest, 'Unable to connect to')
        return dest_path

    # Elsewhere, curl handles TLS for us
    _execute(
        ['curl', '-L', '--silent', '--show-error', '-O', url],
        dest,
        'Failed to connect to'
    )
    return dest_path
def _tuple_from_ver(version_string):
"""
:param version_string:
A unicode dotted version string
:return:
A tuple of integers
"""
match = re.search(
r'(\d+(?:\.\d+)*)'
r'([-._]?(?:alpha|a|beta|b|preview|pre|c|rc)\.?\d*)?'
r'(-\d+|(?:[-._]?(?:rev|r|post)\.?\d*))?'
r'([-._]?dev\.?\d*)?',
version_string
)
if not match:
return tuple()
nums = tuple(map(int, match.group(1).split('.')))
pre = match.group(2)
if pre:
pre = pre.replace('alpha', 'a')
pre = pre.replace('beta', 'b')
pre = pre.replace('preview', 'rc')
pre = pre.replace('pre', 'rc')
pre = re.sub(r'(?<!r)c', 'rc', pre)
pre = pre.lstrip('._-')
pre_dig_match = re.search(r'\d+', pre)
if pre_dig_match:
pre_dig = int(pre_dig_match.group(0))
else:
pre_dig = 0
pre = pre.rstrip('0123456789')
pre_num = {
'a': -3,
'b': -2,
'rc': -1,
}[pre]
pre_tup = (pre_num, pre_dig)
else:
pre_tup = tuple()
post = match.group(3)
if post:
post_dig_match = re.search(r'\d+', post)
if post_dig_match:
post_dig = int(post_dig_match.group(0))
else:
post_dig = 0
post_tup = (1, post_dig)
else:
post_tup = tuple()
dev = match.group(4)
if dev:
dev_dig_match = re.search(r'\d+', dev)
if dev_dig_match:
dev_dig = int(dev_dig_match.group(0))
else:
dev_dig = 0
dev_tup = (-4, dev_dig)
else:
dev_tup = tuple()
normalized = [nums]
if pre_tup:
normalized.append(pre_tup)
if post_tup:
normalized.append(post_tup)
if dev_tup:
normalized.append(dev_tup)
# This ensures regular releases happen after dev and prerelease, but
# before post releases
if not pre_tup and not post_tup and not dev_tup:
normalized.append((0, 0))
return tuple(normalized)
def _open_archive(path):
"""
:param path:
A unicode string of the filesystem path to the archive
:return:
An archive object
"""
if path.endswith('.zip'):
return zipfile.ZipFile(path, 'r')
return tarfile.open(path, 'r')
def _list_archive_members(archive):
"""
:param archive:
An archive from _open_archive()
:return:
A list of info objects to be used with _info_name() and _extract_info()
"""
if isinstance(archive, zipfile.ZipFile):
return archive.infolist()
return archive.getmembers()
def _archive_single_dir(archive):
    """
    Check if all members of the archive are in a single top-level directory

    :param archive:
        An archive from _open_archive()

    :return:
        None if not a single top level directory in archive, otherwise a
        unicode string of the top level directory name
    """

    top_level = None
    for info in _list_archive_members(archive):
        fn = _info_name(info)
        # Ignore the archive's own "current dir" entries
        if fn in set(['.', '/']):
            continue
        # The leading path component before whichever separator appears,
        # or the whole name when there is no separator
        if '/' in fn:
            root_dir = fn.split('/', 1)[0]
        elif '\\' in fn:
            root_dir = fn.split('\\', 1)[0]
        else:
            root_dir = fn
        if top_level is None:
            top_level = root_dir
        elif top_level != root_dir:
            # Two different roots - not a single-dir archive
            return None
    return top_level
def _info_name(info):
"""
Returns a normalized file path for an archive info object
:param info:
An info object from _list_archive_members()
:return:
A unicode string with all directory separators normalized to "/"
"""
if isinstance(info, zipfile.ZipInfo):
return info.filename.replace('\\', '/')
return info.name.replace('\\', '/')
def _extract_info(archive, info):
"""
Extracts the contents of an archive info object
;param archive:
An archive from _open_archive()
:param info:
An info object from _list_archive_members()
:return:
None, or a byte string of the file contents
"""
if isinstance(archive, zipfile.ZipFile):
fn = info.filename
is_dir = fn.endswith('/') or fn.endswith('\\')
out = archive.read(info)
if is_dir and out == b'':
return None
return out
info_file = archive.extractfile(info)
if info_file:
return info_file.read()
return None
def _extract_package(deps_dir, pkg_path, pkg_dir):
    """
    Extract a .whl, .zip, .tar.gz or .tar.bz2 into a package path to
    use when running CI tasks

    :param deps_dir:
        A unicode string of the directory the package should be extracted to

    :param pkg_path:
        A unicode string of the path to the archive

    :param pkg_dir:
        If running setup.py, change to this dir first - a unicode string
    """

    if pkg_path.endswith('.exe'):
        try:
            zf = None
            zf = zipfile.ZipFile(pkg_path, 'r')
            # Exes have a PLATLIB folder containing everything we want
            for zi in zf.infolist():
                if not zi.filename.startswith('PLATLIB'):
                    continue
                data = _extract_info(zf, zi)
                if data is not None:
                    # Strip the "PLATLIB/" prefix (8 characters) so contents
                    # land directly in deps_dir
                    dst_path = os.path.join(deps_dir, zi.filename[8:])
                    dst_dir = os.path.dirname(dst_path)
                    if not os.path.exists(dst_dir):
                        os.makedirs(dst_dir)
                    with open(dst_path, 'wb') as f:
                        f.write(data)
        finally:
            if zf:
                zf.close()
        return

    if pkg_path.endswith('.whl'):
        try:
            zf = None
            zf = zipfile.ZipFile(pkg_path, 'r')
            # Wheels contain exactly what we need and nothing else
            zf.extractall(deps_dir)
        finally:
            if zf:
                zf.close()
        return

    # Source archives may contain a bunch of other things, including multiple
    # packages, so we must use setup.py/setuptool to install/extract it
    ar = None
    staging_dir = os.path.join(deps_dir, '_staging')
    try:
        ar = _open_archive(pkg_path)

        common_root = _archive_single_dir(ar)

        members = []
        for info in _list_archive_members(ar):
            dst_rel_path = _info_name(info)
            if common_root is not None:
                # Strip the shared top-level dir so files land at the
                # staging root
                dst_rel_path = dst_rel_path[len(common_root) + 1:]
            members.append((info, dst_rel_path))

        if not os.path.exists(staging_dir):
            os.makedirs(staging_dir)

        for info, rel_path in members:
            info_data = _extract_info(ar, info)
            # Dirs won't return a file
            if info_data is not None:
                dst_path = os.path.join(staging_dir, rel_path)
                dst_dir = os.path.dirname(dst_path)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst_path, 'wb') as f:
                    f.write(info_data)

        setup_dir = staging_dir
        if pkg_dir:
            setup_dir = os.path.join(staging_dir, pkg_dir)

        # setup.py install --root/--install-lib places the package files
        # into deps_dir
        root = os.path.abspath(os.path.join(deps_dir, '..'))
        install_lib = os.path.basename(deps_dir)

        # Ensure we pick up previously installed packages when running
        # setup.py. This is important for things like setuptools.
        env = os.environ.copy()
        if sys.version_info >= (3,):
            env['PYTHONPATH'] = deps_dir
        else:
            env[b'PYTHONPATH'] = deps_dir.encode('utf-8')

        _execute(
            [
                sys.executable,
                'setup.py',
                'install',
                '--root=%s' % root,
                '--install-lib=%s' % install_lib,
                '--no-compile'
            ],
            setup_dir,
            env=env
        )
    finally:
        if ar:
            ar.close()
        if staging_dir:
            shutil.rmtree(staging_dir)
def _sort_pep440_versions(releases, include_prerelease):
    """
    :param releases:
        A list of unicode string PEP 440 version numbers

    :param include_prerelease:
        A boolean indicating if prerelease versions should be included

    :return:
        A sorted generator of 2-element tuples:
         0: A unicode string containing a PEP 440 version number
         1: A tuple of tuples containing integers - this is the output of
            _tuple_from_ver() for the PEP 440 version number and is intended
            for comparing versions
    """

    candidates = []
    for version in releases:
        parsed = _tuple_from_ver(version)
        # A negative first element of the second tuple marks a dev or
        # prerelease (dev/a/b/rc) version
        if not include_prerelease and parsed[1][0] < 0:
            continue
        candidates.append((version, parsed))

    return sorted(candidates, key=lambda pair: pair[1])
def _is_valid_python_version(python_version, requires_python):
"""
Verifies the "python_version" and "requires_python" keys from a PyPi
download record are applicable to the current version of Python
:param python_version:
The "python_version" value from a PyPi download JSON structure. This
should be one of: "py2", "py3", "py2.py3" or "source".
:param requires_python:
The "requires_python" value from a PyPi download JSON structure. This
will be None, or a comma-separated list of conditions that must be
true. Ex: ">=3.5", "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
"""
if python_version == "py2" and sys.version_info >= (3,):
return False
if python_version == "py3" and sys.version_info < (3,):
return False
if requires_python is not None:
def _ver_tuples(ver_str):
ver_str = ver_str.strip()
if ver_str.endswith('.*'):
ver_str = ver_str[:-2]
cond_tup = tuple(map(int, ver_str.split('.')))
return (sys.version_info[:len(cond_tup)], cond_tup)
for part in map(str_cls.strip, requires_python.split(',')):
if part.startswith('!='):
sys_tup, cond_tup = _ver_tuples(part[2:])
if sys_tup == cond_tup:
return False
elif part.startswith('>='):
sys_tup, cond_tup = _ver_tuples(part[2:])
if sys_tup < cond_tup:
return False
elif part.startswith('>'):
sys_tup, cond_tup = _ver_tuples(part[1:])
if sys_tup <= cond_tup:
return False
elif part.startswith('<='):
sys_tup, cond_tup = _ver_tuples(part[2:])
if sys_tup > cond_tup:
return False
elif part.startswith('<'):
sys_tup, cond_tup = _ver_tuples(part[1:])
if sys_tup >= cond_tup:
return False
elif part.startswith('=='):
sys_tup, cond_tup = _ver_tuples(part[2:])
if sys_tup != cond_tup:
return False
return True
def _locate_suitable_download(downloads):
    """
    :param downloads:
        A list of dicts containing a key "url", "python_version" and
        "requires_python"

    :return:
        A unicode string URL, or None if not a valid release for the current
        version of Python
    """

    valid_tags = _pep425tags()

    # On Windows with CPython, a bdist .exe installer is also acceptable
    exe_suffix = None
    if sys.platform == 'win32' and _pep425_implementation() == 'cp':
        win_arch = 'win32' if sys.maxsize == 2147483647 else 'win-amd64'
        version_info = sys.version_info
        exe_suffix = '.%s-py%d.%d.exe' % (win_arch, version_info[0], version_info[1])

    wheels = {}
    exe = None
    tar_bz2 = None
    tar_gz = None
    for download in downloads:
        if not _is_valid_python_version(download.get('python_version'), download.get('requires_python')):
            continue
        dl_url = download['url']
        if exe_suffix and dl_url.endswith(exe_suffix):
            exe = dl_url
        if dl_url.endswith('.whl'):
            # Wheel filenames end in {impl}-{abi}-{arch}.whl
            parts = os.path.basename(dl_url).split('-')
            tag_impl = parts[-3]
            tag_abi = parts[-2]
            tag_arch = parts[-1].split('.')[0]
            wheels[(tag_impl, tag_abi, tag_arch)] = dl_url
        if dl_url.endswith('.tar.bz2'):
            tar_bz2 = dl_url
        if dl_url.endswith('.tar.gz'):
            tar_gz = dl_url

    # Find the most-specific wheel possible
    whl = None
    for tag in valid_tags:
        if tag in wheels:
            whl = wheels[tag]
            break

    # Preference order: bdist exe, wheel, bz2 source, gz source
    for candidate in (exe if exe_suffix else None, whl, tar_bz2, tar_gz):
        if candidate:
            return candidate
    return None
def _stage_requirements(deps_dir, path):
    """
    Installs requirements without using Python to download, since
    different services are limiting to TLS 1.2, and older version of
    Python do not support that

    :param deps_dir:
        A unicode path to a temporary directory to use for downloads

    :param path:
        A unicode filesystem path to a requirements file
    """

    packages = _parse_requires(path)
    for p in packages:
        url = None
        pkg = p['pkg']
        pkg_sub_dir = None
        if p['type'] == 'url':
            # URL requirements may carry a "#subdirectory=..." fragment
            # naming the dir inside the archive that holds setup.py
            anchor = None
            if '#' in pkg:
                pkg, anchor = pkg.split('#', 1)
                if '&' in anchor:
                    parts = anchor.split('&')
                else:
                    parts = [anchor]
                for part in parts:
                    param, value = part.split('=')
                    if param == 'subdirectory':
                        pkg_sub_dir = value

            if pkg.endswith('.zip') or pkg.endswith('.tar.gz') or pkg.endswith('.tar.bz2') or pkg.endswith('.whl'):
                url = pkg
            else:
                raise Exception('Unable to install package from URL that is not an archive')
        else:
            # Resolve the package via the PyPi JSON API
            pypi_json_url = 'https://pypi.org/pypi/%s/json' % pkg
            json_dest = _download(pypi_json_url, deps_dir)
            with open(json_dest, 'rb') as f:
                pkg_info = json.loads(f.read().decode('utf-8'))
            if os.path.exists(json_dest):
                os.remove(json_dest)

            if p['type'] == '==':
                # Exact pin - the requested version must exist
                if p['ver'] not in pkg_info['releases']:
                    raise Exception('Unable to find version %s of %s' % (p['ver'], pkg))
                url = _locate_suitable_download(pkg_info['releases'][p['ver']])
                if not url:
                    raise Exception('Unable to find a compatible download of %s == %s' % (pkg, p['ver']))
            else:
                # Walk releases from newest to oldest until a compatible
                # download is found
                # NOTE(review): p['ver'] raises KeyError for 'any'-type
                # requirements (which have no 'ver' key) - confirm whether
                # 'any' entries are expected to reach this branch
                p_ver_tup = _tuple_from_ver(p['ver'])
                for ver_str, ver_tup in reversed(_sort_pep440_versions(pkg_info['releases'], False)):
                    if p['type'] == '>=' and ver_tup < p_ver_tup:
                        break
                    url = _locate_suitable_download(pkg_info['releases'][ver_str])
                    if url:
                        break
                if not url:
                    if p['type'] == '>=':
                        raise Exception('Unable to find a compatible download of %s >= %s' % (pkg, p['ver']))
                    else:
                        raise Exception('Unable to find a compatible download of %s' % pkg)

        # Download, extract into deps_dir, then discard the archive
        local_path = _download(url, deps_dir)
        _extract_package(deps_dir, local_path, pkg_sub_dir)
        os.remove(local_path)
def _parse_requires(path):
    """
    Does basic parsing of pip requirements files, to allow for
    using something other than Python to do actual TLS requests

    :param path:
        A path to a requirements file

    :return:
        A list of dict objects containing the keys:
         - 'type' ('any', 'url', '==', '>=')
         - 'pkg'
         - 'ver' (if 'type' == '==' or 'type' == '>=')
    """

    python_version = '.'.join(map(str_cls, sys.version_info[0:2]))
    sys_platform = sys.platform

    packages = []

    with open(path, 'rb') as f:
        contents = f.read().decode('utf-8')

    for line in re.split(r'\r?\n', contents):
        line = line.strip()
        if not len(line):
            continue
        # Comment lines
        if re.match(r'^\s*#', line):
            continue
        if ';' in line:
            # Environment marker, e.g. "pkg ; python_version < '3'"
            package, cond = line.split(';', 1)
            package = package.strip()
            cond = cond.strip()
            cond = cond.replace('sys_platform', repr(sys_platform))
            cond = cond.replace('python_version', repr(python_version))
            # NOTE(review): eval() executes text from the requirements file -
            # safe only because these files are trusted, first-party inputs;
            # never point this at untrusted files
            if not eval(cond):
                continue
        else:
            package = line.strip()

        # "-r other-file" includes another requirements file, resolved
        # relative to this one and parsed recursively
        if re.match(r'^\s*-r\s*', package):
            sub_req_file = re.sub(r'^\s*-r\s*', '', package)
            sub_req_file = os.path.abspath(os.path.join(os.path.dirname(path), sub_req_file))
            packages.extend(_parse_requires(sub_req_file))
            continue

        if re.match(r'https?://', package):
            packages.append({'type': 'url', 'pkg': package})
            continue

        if '>=' in package:
            parts = package.split('>=')
            package = parts[0].strip()
            ver = parts[1].strip()
            packages.append({'type': '>=', 'pkg': package, 'ver': ver})
            continue

        if '==' in package:
            parts = package.split('==')
            package = parts[0].strip()
            ver = parts[1].strip()
            packages.append({'type': '==', 'pkg': package, 'ver': ver})
            continue

        # Anything left must be a bare package name
        if re.search(r'[^ a-zA-Z0-9\-]', package):
            raise Exception('Unsupported requirements format version constraint: %s' % package)

        packages.append({'type': 'any', 'pkg': package})

    return packages
def _execute(params, cwd, retry=None, env=None):
"""
Executes a subprocess
:param params:
A list of the executable and arguments to pass to it
:param cwd:
The working directory to execute the command in
:param retry:
If this string is present in stderr, retry the operation
:return:
A 2-element tuple of (stdout, stderr)
"""
proc = subprocess.Popen(
params,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=cwd,
env=env
)
stdout, stderr = proc.communicate()
code = proc.wait()
if code != 0:
if retry and retry in stderr.decode('utf-8'):
return _execute(params, cwd)
e = OSError('subprocess exit code for "%s" was %d: %s' % (' '.join(params), code, stderr))
e.stdout = stdout
e.stderr = stderr
raise e
return (stdout, stderr)

View File

@ -0,0 +1,38 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
from . import package_name, package_root
import flake8
if not hasattr(flake8, '__version_info__') or flake8.__version_info__ < (3,):
from flake8.engine import get_style_guide
else:
from flake8.api.legacy import get_style_guide
def run():
    """
    Runs flake8 lint

    :return:
        A bool - if flake8 did not find any errors
    """

    print('Running flake8 %s' % flake8.__version__)

    flake8_style = get_style_guide(config_file=os.path.join(package_root, 'tox.ini'))

    # Collect every .py file under the package, dev and tests dirs
    paths = []
    for _dir in [package_name, 'dev', 'tests']:
        for root, _, filenames in os.walk(_dir):
            paths.extend(
                os.path.join(root, filename)
                for filename in filenames
                if filename.endswith('.py')
            )

    report = flake8_style.check_files(paths)
    success = report.total_errors == 0
    if success:
        print('OK')
    return success

View File

@ -0,0 +1,144 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
import subprocess
import sys
# Command-line argument definitions consumed by the task runner: each entry
# maps a positional CLI argument name to the run() keyword it populates
run_args = [
    {
        'name': 'version',
        'kwarg': 'version',
    },
]
def _write_env(env, key, value):
sys.stdout.write("%s: %s\n" % (key, value))
sys.stdout.flush()
if sys.version_info < (3,):
env[key.encode('utf-8')] = value.encode('utf-8')
else:
env[key] = value
def run(version=None):
    """
    Installs a version of Python on Mac using pyenv

    :param version:
        A unicode string of the major.minor version - "2.6" or "3.3"

    :return:
        A bool - if Python was installed successfully
    """

    if sys.platform == 'win32':
        raise ValueError('pyenv-install is not designed for Windows')

    if version not in set(['2.6', '3.3']):
        raise ValueError('Invalid version: %r' % version)

    python_path = os.path.expanduser('~/.pyenv/versions/%s/bin' % version)
    # Reuse an existing pyenv-built interpreter when present
    if os.path.exists(os.path.join(python_path, 'python')):
        print(python_path)
        return True

    stdout = ""
    stderr = ""

    # Install pyenv via Homebrew if it is not already on the PATH
    proc = subprocess.Popen(
        'command -v pyenv',
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )
    proc.communicate()
    if proc.returncode != 0:
        proc = subprocess.Popen(
            ['brew', 'install', 'pyenv'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        so, se = proc.communicate()
        stdout += so.decode('utf-8')
        stderr += se.decode('utf-8')
        if proc.returncode != 0:
            print(stdout)
            print(stderr, file=sys.stderr)
            return False

    # Write a custom pyenv build definition that pins OpenSSL/readline
    # downloads with known hashes
    pyenv_script = './%s' % version
    try:
        with open(pyenv_script, 'wb') as f:
            if version == '2.6':
                contents = '#require_gcc\n' \
                    'install_package "openssl-1.0.2k" "https://www.openssl.org/source/old/1.0.2/openssl-1.0.2k.tar.gz' \
                    '#6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0" mac_openssl\n' \
                    'install_package "readline-8.0" "https://ftpmirror.gnu.org/readline/readline-8.0.tar.gz' \
                    '#e339f51971478d369f8a053a330a190781acb9864cf4c541060f12078948e461" mac_readline' \
                    ' --if has_broken_mac_readline\n' \
                    'install_package "Python-2.6.9" "https://www.python.org/ftp/python/2.6.9/Python-2.6.9.tgz' \
                    '#7277b1285d8a82f374ef6ebaac85b003266f7939b3f2a24a3af52f9523ac94db" standard verify_py26'
            elif version == '3.3':
                contents = '#require_gcc\n' \
                    'install_package "openssl-1.0.2k" "https://www.openssl.org/source/old/1.0.2/openssl-1.0.2k.tar.gz' \
                    '#6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0" mac_openssl\n' \
                    'install_package "readline-8.0" "https://ftpmirror.gnu.org/readline/readline-8.0.tar.gz' \
                    '#e339f51971478d369f8a053a330a190781acb9864cf4c541060f12078948e461" mac_readline' \
                    ' --if has_broken_mac_readline\n' \
                    'install_package "Python-3.3.7" "https://www.python.org/ftp/python/3.3.7/Python-3.3.7.tar.xz' \
                    '#85f60c327501c36bc18c33370c14d472801e6af2f901dafbba056f61685429fe" standard verify_py33'
            f.write(contents.encode('utf-8'))

        args = ['pyenv', 'install', pyenv_script]
        stdin = None
        stdin_contents = None
        env = os.environ.copy()

        if version == '2.6':
            _write_env(env, 'PYTHON_CONFIGURE_OPTS', '--enable-ipv6')
            stdin = subprocess.PIPE
            # Patch configure so the buggy-getaddrinfo check does not abort
            # the 2.6 build; fed to "pyenv install --patch" via stdin
            stdin_contents = '--- configure 2021-08-05 20:17:26.000000000 -0400\n' \
                '+++ configure 2021-08-05 20:21:30.000000000 -0400\n' \
                '@@ -10300,17 +10300,8 @@\n' \
                ' rm -f core conftest.err conftest.$ac_objext \\\n' \
                ' conftest$ac_exeext conftest.$ac_ext\n' \
                ' \n' \
                '-if test "$buggygetaddrinfo" = "yes"; then\n' \
                '-\tif test "$ipv6" = "yes"; then\n' \
                '-\t\techo \'Fatal: You must get working getaddrinfo() function.\'\n' \
                '-\t\techo \' or you can specify "--disable-ipv6"\'.\n' \
                '-\t\texit 1\n' \
                '-\tfi\n' \
                '-else\n' \
                '-\n' \
                ' $as_echo "#define HAVE_GETADDRINFO 1" >>confdefs.h\n' \
                ' \n' \
                '-fi\n' \
                ' for ac_func in getnameinfo\n' \
                ' do :\n' \
                ' ac_fn_c_check_func "$LINENO" "getnameinfo" "ac_cv_func_getnameinfo"'
            stdin_contents = stdin_contents.encode('ascii')
            args.append('--patch')

        proc = subprocess.Popen(
            args,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            stdin=stdin,
            env=env
        )
        so, se = proc.communicate(stdin_contents)
        stdout += so.decode('utf-8')
        stderr += se.decode('utf-8')
        if proc.returncode != 0:
            print(stdout)
            print(stderr, file=sys.stderr)
            return False
    finally:
        # The build definition is only needed for the install itself
        if os.path.exists(pyenv_script):
            os.unlink(pyenv_script)

    print(python_path)
    return True

View File

@ -0,0 +1,77 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
import shutil
import subprocess
import sys
from urllib.parse import urlparse
from urllib.request import urlopen
# Argument spec for the dev task runner: each entry appears to map a CLI
# positional argument to a keyword argument of run() below — see dev._task
# for the consumer.
run_args = [
    {
        'name': 'version',
        'kwarg': 'version',
    },
    {
        'name': 'arch',
        'kwarg': 'arch',
    },
]
def run(version=None, arch=None):
    """
    Installs a version of Python on Windows

    :param version:
        A unicode string of the major.minor Python version - '2.6' or '3.3'

    :param arch:
        A unicode string of the architecture - 'x86' or 'x64'

    :raises:
        ValueError - when not run on Windows, or version/arch is invalid

    :return:
        A bool - if Python was installed successfully
    """

    if sys.platform != 'win32':
        raise ValueError('python-install is only designed for Windows')
    if version not in set(['2.6', '3.3']):
        raise ValueError('Invalid version: %r' % version)
    if arch not in set(['x86', 'x64']):
        raise ValueError('Invalid arch: %r' % arch)

    # Installers are only published for these exact point releases
    if version == '2.6':
        if arch == 'x64':
            url = 'https://www.python.org/ftp/python/2.6.6/python-2.6.6.amd64.msi'
        else:
            url = 'https://www.python.org/ftp/python/2.6.6/python-2.6.6.msi'
    else:
        if arch == 'x64':
            url = 'https://www.python.org/ftp/python/3.3.5/python-3.3.5.amd64.msi'
        else:
            url = 'https://www.python.org/ftp/python/3.3.5/python-3.3.5.msi'

    home = os.environ.get('USERPROFILE')
    msi_filename = os.path.basename(urlparse(url).path)
    msi_path = os.path.join(home, msi_filename)
    install_path = os.path.join(os.environ.get('LOCALAPPDATA'), 'Python%s-%s' % (version, arch))

    # Short-circuit if this version/arch combination was already installed
    if os.path.exists(os.path.join(install_path, 'python.exe')):
        print(install_path)
        return True

    try:
        # Download the .msi into the user profile directory
        with urlopen(url) as r, open(msi_path, 'wb') as f:
            shutil.copyfileobj(r, f)

        # BUG FIX: pass the command as an argument list without a shell so
        # that paths containing spaces (e.g. "C:\Users\John Doe") are quoted
        # correctly, and check the exit code instead of unconditionally
        # reporting success.
        proc = subprocess.Popen(
            ['msiexec', '/passive', '/a', msi_filename, 'TARGETDIR=%s' % install_path],
            cwd=home
        )
        proc.communicate()
        if proc.returncode != 0:
            print('msiexec exited with code %d' % proc.returncode, file=sys.stderr)
            return False
    finally:
        # Always remove the downloaded installer, even on failure
        if os.path.exists(msi_path):
            os.unlink(msi_path)

    print(install_path)
    return True

View File

@ -0,0 +1,60 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import subprocess
import sys
import twine.cli
from . import package_name, package_root, has_tests_package
from .build import run as build
def run():
    """
    Creates a sdist .tar.gz and a bdist_wheel --universal .whl and uploads
    them to pypi

    :return:
        A bool - if the packaging and upload process was successful
    """

    # Refuse to release from a dirty working copy
    status_proc = subprocess.Popen(
        ['git', 'status', '--porcelain', '-uno'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=package_root
    )
    status_output, _ = status_proc.communicate()
    if status_output:
        print(status_output.decode('utf-8').rstrip(), file=sys.stderr)
        print('Unable to perform release since working copy is not clean', file=sys.stderr)
        return False

    # The release version is taken from the git tag on HEAD
    tag_proc = subprocess.Popen(
        ['git', 'tag', '-l', '--contains', 'HEAD'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=package_root
    )
    tag_output, tag_err = tag_proc.communicate()
    if tag_err:
        print(tag_err.decode('utf-8').rstrip(), file=sys.stderr)
        print('Error looking for current git tag', file=sys.stderr)
        return False
    if not tag_output:
        print('No git tag found on HEAD', file=sys.stderr)
        return False

    release_version = tag_output.decode('ascii').strip()

    # Build the artifacts, then hand them to twine for upload
    build()
    twine.cli.dispatch(['upload', 'dist/%s-%s*' % (package_name, release_version)])
    if has_tests_package:
        twine.cli.dispatch(['upload', 'dist/%s_tests-%s*' % (package_name, release_version)])
    return True

View File

@ -0,0 +1,91 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import unittest
import re
import sys
import warnings
from . import requires_oscrypto
from ._import import _preload
from tests import test_classes
if sys.version_info < (3,):
range = xrange # noqa
from cStringIO import StringIO
else:
from io import StringIO
# Argument spec for the dev task runner: appears to map CLI positional
# arguments to run() keyword arguments; 'cast' requests conversion of the
# raw string to int — see dev._task for the consumer.
run_args = [
    {
        'name': 'regex',
        'kwarg': 'matcher',
    },
    {
        'name': 'repeat_count',
        'kwarg': 'repeat',
        'cast': 'int',
    },
]
def run(matcher=None, repeat=1, ci=False):
    """
    Runs the tests

    :param matcher:
        A unicode string containing a regular expression to use to filter test
        names by. A value of None will cause no filtering.

    :param repeat:
        An integer - the number of times to run the tests

    :param ci:
        A bool, indicating if the tests are being run as part of CI

    :return:
        A bool - if the tests succeeded
    """

    _preload(requires_oscrypto, not ci)

    # Promote warnings to errors so they surface as test failures
    warnings.filterwarnings("error")

    loader = unittest.TestLoader()

    # We have to manually track the list of applicable tests because for
    # some reason with Python 3.4 on Windows, the tests in a suite are replaced
    # with None after being executed. This breaks the repeat functionality.
    test_list = []
    for test_class in test_classes():
        if matcher:
            names = loader.getTestCaseNames(test_class)
            for name in names:
                if re.search(matcher, name):
                    test_list.append(test_class(name))
        else:
            test_list.append(loader.loadTestsFromTestCase(test_class))

    stream = sys.stdout
    verbosity = 1
    if matcher and repeat == 1:
        verbosity = 2
    elif repeat > 1:
        # Buffer the runner output and only print it when a repeat run fails
        stream = StringIO()

    for _ in range(0, repeat):
        suite = unittest.TestSuite()
        for test in test_list:
            suite.addTest(test)
        result = unittest.TextTestRunner(stream=stream, verbosity=verbosity).run(suite)
        if len(result.errors) > 0 or len(result.failures) > 0:
            if repeat > 1:
                print(stream.getvalue())
            return False
        if repeat > 1:
            # BUG FIX: StringIO.truncate(0) does not rewind the stream
            # position, so subsequent writes would be padded with NUL
            # characters. Rewind first, then truncate at position 0.
            stream.seek(0)
            stream.truncate()
    return True

View File

@ -0,0 +1,88 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import codecs
import os
import re
from . import package_root, package_name, has_tests_package
# Argument spec for the dev task runner: a single required positional
# argument carrying the new PEP 440 version string — see dev._task for the
# consumer.
run_args = [
    {
        'name': 'pep440_version',
        'required': True
    },
]
def run(new_version):
    """
    Updates the package version in the various locations

    :param new_version:
        A unicode string of the new library version as a PEP 440 version

    :raises:
        ValueError - when new_version is not a valid restricted PEP 440
        version, or a target file contains no version assignments

    :return:
        A bool - if the version number was successfully bumped
    """

    # We use a restricted form of PEP 440 versions: X.Y.Z with an optional
    # .devN/.aN/.bN/.rcN suffix.
    # BUG FIX: the patch component must be (\d+), not (\d)+ - the latter
    # only captures the final digit of multi-digit patch versions
    # (e.g. "1.0.12" would yield group(3) == "2").
    version_match = re.match(
        r'(\d+)\.(\d+)\.(\d+)(?:\.((?:dev|a|b|rc)\d+))?$',
        new_version
    )
    if not version_match:
        raise ValueError('Invalid PEP 440 version: %s' % new_version)

    # Tuple form written as __version_info__
    new_version_info = (
        int(version_match.group(1)),
        int(version_match.group(2)),
        int(version_match.group(3)),
    )
    if version_match.group(4):
        new_version_info += (version_match.group(4),)

    version_path = os.path.join(package_root, package_name, 'version.py')
    setup_path = os.path.join(package_root, 'setup.py')
    setup_tests_path = os.path.join(package_root, 'tests', 'setup.py')
    tests_path = os.path.join(package_root, 'tests', '__init__.py')

    file_paths = [version_path, setup_path]
    if has_tests_package:
        file_paths.extend([setup_tests_path, tests_path])

    for file_path in file_paths:
        orig_source = ''
        with codecs.open(file_path, 'r', encoding='utf-8') as f:
            orig_source = f.read()

        # Rewrite only the recognized version assignment lines, preserving
        # everything else byte-for-byte
        found = 0
        new_source = ''
        for line in orig_source.splitlines(True):
            if line.startswith('__version__ = '):
                found += 1
                new_source += '__version__ = %r\n' % new_version
            elif line.startswith('__version_info__ = '):
                found += 1
                new_source += '__version_info__ = %r\n' % (new_version_info,)
            elif line.startswith('PACKAGE_VERSION = '):
                found += 1
                new_source += 'PACKAGE_VERSION = %r\n' % new_version
            else:
                new_source += line

        if found == 0:
            raise ValueError('Did not find any versions in %s' % file_path)

        s = 's' if found > 1 else ''
        rel_path = file_path[len(package_root) + 1:]
        was_were = 'was' if found == 1 else 'were'
        if new_source != orig_source:
            print('Updated %d version%s in %s' % (found, s, rel_path))
            with codecs.open(file_path, 'w', encoding='utf-8') as f:
                f.write(new_source)
        else:
            print('%d version%s in %s %s up-to-date' % (found, s, rel_path, was_were))

    return True

View File

@ -0,0 +1,267 @@
# asn1crypto
A fast, pure Python library for parsing and serializing ASN.1 structures.
- [Features](#features)
- [Why Another Python ASN.1 Library?](#why-another-python-asn1-library)
- [Related Crypto Libraries](#related-crypto-libraries)
- [Current Release](#current-release)
- [Dependencies](#dependencies)
- [Installation](#installation)
- [License](#license)
- [Documentation](#documentation)
- [Continuous Integration](#continuous-integration)
- [Testing](#testing)
- [Development](#development)
- [CI Tasks](#ci-tasks)
[![GitHub Actions CI](https://github.com/wbond/asn1crypto/workflows/CI/badge.svg)](https://github.com/wbond/asn1crypto/actions?workflow=CI)
[![CircleCI](https://circleci.com/gh/wbond/asn1crypto.svg?style=shield)](https://circleci.com/gh/wbond/asn1crypto)
[![PyPI](https://img.shields.io/pypi/v/asn1crypto.svg)](https://pypi.org/project/asn1crypto/)
## Features
In addition to an ASN.1 BER/DER decoder and DER serializer, the project includes
a bunch of ASN.1 structures for use with various common cryptography standards:
| Standard | Module | Source |
| ---------------------- | ------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- |
| X.509 | [`asn1crypto.x509`](asn1crypto/x509.py) | [RFC 5280](https://tools.ietf.org/html/rfc5280) |
| CRL | [`asn1crypto.crl`](asn1crypto/crl.py) | [RFC 5280](https://tools.ietf.org/html/rfc5280) |
| CSR | [`asn1crypto.csr`](asn1crypto/csr.py) | [RFC 2986](https://tools.ietf.org/html/rfc2986), [RFC 2985](https://tools.ietf.org/html/rfc2985) |
| OCSP | [`asn1crypto.ocsp`](asn1crypto/ocsp.py) | [RFC 6960](https://tools.ietf.org/html/rfc6960) |
| PKCS#12 | [`asn1crypto.pkcs12`](asn1crypto/pkcs12.py) | [RFC 7292](https://tools.ietf.org/html/rfc7292) |
| PKCS#8 | [`asn1crypto.keys`](asn1crypto/keys.py) | [RFC 5208](https://tools.ietf.org/html/rfc5208) |
| PKCS#1 v2.1 (RSA keys) | [`asn1crypto.keys`](asn1crypto/keys.py) | [RFC 3447](https://tools.ietf.org/html/rfc3447) |
| DSA keys | [`asn1crypto.keys`](asn1crypto/keys.py) | [RFC 3279](https://tools.ietf.org/html/rfc3279) |
| Elliptic curve keys | [`asn1crypto.keys`](asn1crypto/keys.py) | [SECG SEC1 V2](http://www.secg.org/sec1-v2.pdf) |
| PKCS#3 v1.4 | [`asn1crypto.algos`](asn1crypto/algos.py) | [PKCS#3 v1.4](ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-3.asc) |
| PKCS#5 v2.1 | [`asn1crypto.algos`](asn1crypto/algos.py) | [PKCS#5 v2.1](http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf) |
| CMS (and PKCS#7) | [`asn1crypto.cms`](asn1crypto/cms.py) | [RFC 5652](https://tools.ietf.org/html/rfc5652), [RFC 2315](https://tools.ietf.org/html/rfc2315) |
| TSP | [`asn1crypto.tsp`](asn1crypto/tsp.py) | [RFC 3161](https://tools.ietf.org/html/rfc3161) |
| PDF signatures | [`asn1crypto.pdf`](asn1crypto/pdf.py) | [PDF 1.7](http://wwwimages.adobe.com/content/dam/Adobe/en/devnet/pdf/pdfs/PDF32000_2008.pdf) |
## Why Another Python ASN.1 Library?
Python has long had the [pyasn1](https://pypi.org/project/pyasn1/) and
[pyasn1_modules](https://pypi.org/project/pyasn1-modules/) libraries available
for parsing and serializing ASN.1 structures. While those projects include a
comprehensive set of tools for parsing and serializing, their performance can
be very poor, especially when dealing with bit fields and parsing
large structures such as CRLs.
After spending extensive time using *pyasn1*, the following issues were
identified:
1. Poor performance
2. Verbose, non-pythonic API
3. Out-dated and incomplete definitions in *pyasn1-modules*
4. No simple way to map data to native Python data structures
5. No mechanism for overridden universal ASN.1 types
The *pyasn1* API is largely method driven, and uses extensive configuration
objects and lowerCamelCase names. There were no consistent options for
converting types of native Python data structures. Since the project supports
out-dated versions of Python, many newer language features are unavailable
for use.
Time was spent trying to profile issues with the performance, however the
architecture made it hard to pin down the primary source of the poor
performance. Attempts were made to improve performance by utilizing unreleased
patches and delaying parsing using the `Any` type. Even with such changes, the
performance was still unacceptably slow.
Finally, a number of structures in the cryptographic space use universal data
types such as `BitString` and `OctetString`, but interpret the data as other
types. For instance, signatures are really byte strings, but are encoded as
`BitString`. Elliptic curve keys use both `BitString` and `OctetString` to
represent integers. Parsing these structures as the base universal types and
then re-interpreting them wastes computation.
*asn1crypto* uses the following techniques to improve performance, especially
when extracting one or two fields from large, complex structures:
- Delayed parsing of byte string values
- Persistence of original ASN.1 encoded data until a value is changed
- Lazy loading of child fields
- Utilization of high-level Python stdlib modules
While there is no extensive performance test suite, the
`CRLTests.test_parse_crl` test case was used to parse a 21MB CRL file on a
late 2013 rMBP. *asn1crypto* parsed the certificate serial numbers in just
under 8 seconds. With *pyasn1*, using definitions from *pyasn1-modules*, the
same parsing took over 4,100 seconds.
For smaller structures the performance difference can range from a few times
faster to an order of magnitude or more.
## Related Crypto Libraries
*asn1crypto* is part of the modularcrypto family of Python packages:
- [asn1crypto](https://github.com/wbond/asn1crypto)
- [oscrypto](https://github.com/wbond/oscrypto)
- [csrbuilder](https://github.com/wbond/csrbuilder)
- [certbuilder](https://github.com/wbond/certbuilder)
- [crlbuilder](https://github.com/wbond/crlbuilder)
- [ocspbuilder](https://github.com/wbond/ocspbuilder)
- [certvalidator](https://github.com/wbond/certvalidator)
## Current Release
1.4.0 - [changelog](changelog.md)
## Dependencies
Python 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8, 3.9 or pypy. *No third-party
packages required.*
## Installation
```bash
pip install asn1crypto
```
## License
*asn1crypto* is licensed under the terms of the MIT license. See the
[LICENSE](LICENSE) file for the exact license text.
## Documentation
The documentation for *asn1crypto* is composed of tutorials on basic usage and
links to the source for the various pre-defined type classes.
### Tutorials
- [Universal Types with BER/DER Decoder and DER Encoder](docs/universal_types.md)
- [PEM Encoder and Decoder](docs/pem.md)
### Reference
- [Universal types](asn1crypto/core.py), `asn1crypto.core`
- [Digest, HMAC, signed digest and encryption algorithms](asn1crypto/algos.py), `asn1crypto.algos`
- [Private and public keys](asn1crypto/keys.py), `asn1crypto.keys`
- [X509 certificates](asn1crypto/x509.py), `asn1crypto.x509`
- [Certificate revocation lists (CRLs)](asn1crypto/crl.py), `asn1crypto.crl`
- [Online certificate status protocol (OCSP)](asn1crypto/ocsp.py), `asn1crypto.ocsp`
- [Certificate signing requests (CSRs)](asn1crypto/csr.py), `asn1crypto.csr`
- [Private key/certificate containers (PKCS#12)](asn1crypto/pkcs12.py), `asn1crypto.pkcs12`
- [Cryptographic message syntax (CMS, PKCS#7)](asn1crypto/cms.py), `asn1crypto.cms`
- [Time stamp protocol (TSP)](asn1crypto/tsp.py), `asn1crypto.tsp`
- [PDF signatures](asn1crypto/pdf.py), `asn1crypto.pdf`
## Continuous Integration
Various combinations of platforms and versions of Python are tested via:
- [macOS, Linux, Windows](https://github.com/wbond/asn1crypto/actions/workflows/ci.yml) via GitHub Actions
- [arm64](https://circleci.com/gh/wbond/asn1crypto) via CircleCI
## Testing
Tests are written using `unittest` and require no third-party packages.
Depending on what type of source is available for the package, the following
commands can be used to run the test suite.
### Git Repository
When working within a Git working copy, or an archive of the Git repository,
the full test suite is run via:
```bash
python run.py tests
```
To run only some tests, pass a regular expression as a parameter to `tests`.
```bash
python run.py tests ocsp
```
### PyPi Source Distribution
When working within an extracted source distribution (aka `.tar.gz`) from
PyPi, the full test suite is run via:
```bash
python setup.py test
```
### Package
When the package has been installed via pip (or another method), the package
`asn1crypto_tests` may be installed and invoked to run the full test suite:
```bash
pip install asn1crypto_tests
python -m asn1crypto_tests
```
## Development
To install the package used for linting, execute:
```bash
pip install --user -r requires/lint
```
The following command will run the linter:
```bash
python run.py lint
```
Support for code coverage can be installed via:
```bash
pip install --user -r requires/coverage
```
Coverage is measured by running:
```bash
python run.py coverage
```
To change the version number of the package, run:
```bash
python run.py version {pep440_version}
```
To install the necessary packages for releasing a new version on PyPI, run:
```bash
pip install --user -r requires/release
```
Releases are created by:
- Making a git tag in [PEP 440](https://www.python.org/dev/peps/pep-0440/#examples-of-compliant-version-schemes) format
- Running the command:
```bash
python run.py release
```
Existing releases can be found at https://pypi.org/project/asn1crypto/.
## CI Tasks
A task named `deps` exists to download and stage all necessary testing
dependencies. On posix platforms, `curl` is used for downloads and on Windows
PowerShell with `Net.WebClient` is used. This configuration sidesteps issues
related to getting pip to work properly and messing with `site-packages` for
the version of Python being used.
The `ci` task runs `lint` (if flake8 is available for the version of Python) and
`coverage` (or `tests` if coverage is not available for the version of Python).
If the current directory is a clean git working copy, the coverage data is
submitted to codecov.io.
```bash
python run.py deps
python run.py ci
```

View File

@ -0,0 +1,2 @@
-r ./coverage
-r ./lint

View File

@ -0,0 +1,5 @@
setuptools == 39.2.0 ; python_version == '3.3'
coverage == 4.4.1 ; python_version == '2.6'
coverage == 4.2 ; python_version == '3.3' and sys_platform == "win32"
coverage == 4.5.4 ; (python_version == '3.3' and sys_platform != "win32") or python_version == '3.4'
coverage == 5.5 ; python_version == '2.7' or python_version >= '3.5'

View File

@ -0,0 +1,14 @@
setuptools >= 39.0.1 ; python_version == '2.7' or python_version >= '3.3'
enum34 == 1.1.6 ; python_version == '2.7' or python_version == '3.3'
configparser == 3.5.0 ; python_version == '2.7'
mccabe == 0.6.1 ; python_version == '3.3'
pycodestyle == 2.3.1 ; python_version == '3.3'
pyflakes == 1.6.0 ; python_version == '3.3'
flake8 == 3.5.0 ; python_version == '3.3'
mccabe == 0.6.1 ; python_version == '2.7' or python_version >= '3.4'
pycodestyle == 2.5.0 ; python_version == '2.7' or python_version >= '3.4'
pyflakes == 2.1.1 ; python_version == '2.7' or python_version >= '3.4'
functools32 == 3.2.3-2 ; python_version == '2.7'
typing == 3.7.4.1 ; python_version == '2.7' or python_version == '3.4'
entrypoints == 0.3 ; python_version == '2.7' or python_version >= '3.4'
flake8 == 3.7.9 ; python_version == '2.7' or python_version >= '3.4'

View File

@ -0,0 +1,3 @@
wheel >= 0.31.0
twine >= 1.11.0
setuptools >= 38.6.0

View File

@ -0,0 +1,8 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
from dev._task import run_task
# Dispatch to the dev task runner defined in dev._task
run_task()

View File

@ -0,0 +1,159 @@
import codecs
import os
import shutil
import sys
import warnings
import setuptools
from setuptools import setup, Command
from setuptools.command.egg_info import egg_info
# Package identity and the on-disk location of this setup.py
PACKAGE_NAME = 'asn1crypto'
PACKAGE_VERSION = '1.4.0'
PACKAGE_ROOT = os.path.dirname(os.path.abspath(__file__))

# setuptools 38.6.0 and newer know about long_description_content_type, but
# distutils still complains about it, so silence the warning
sv = setuptools.__version__
# Version components are compared numerically where possible; non-numeric
# components (e.g. 'rc1') are kept as strings
svi = tuple(int(o) if o.isdigit() else o for o in sv.split('.'))
if svi >= (38, 6):
    warnings.filterwarnings(
        'ignore',
        "Unknown distribution option: 'long_description_content_type'",
        module='distutils.dist'
    )

# Try to load the tests first from the source repository layout. If that
# doesn't work, we assume this file is in the release package, and the tests
# are part of the package {PACKAGE_NAME}_tests.
if os.path.exists(os.path.join(PACKAGE_ROOT, 'tests')):
    tests_require = []
    test_suite = 'tests.make_suite'
else:
    tests_require = ['%s_tests' % PACKAGE_NAME]
    test_suite = '%s_tests.make_suite' % PACKAGE_NAME

# This allows us to send the LICENSE and docs when creating a sdist. Wheels
# automatically include the LICENSE, and don't need the docs. For these
# to be included, the command must be "python setup.py sdist".
package_data = {}
if sys.argv[1:] == ['sdist'] or sorted(sys.argv[1:]) == ['-q', 'sdist']:
    package_data[PACKAGE_NAME] = [
        '../LICENSE',
        '../*.md',
        '../docs/*.md',
    ]
# Ensures a copy of the LICENSE is included with the egg-info for
# install and bdist_egg commands
class EggInfoCommand(egg_info):
    # Subclass of setuptools' egg_info command that copies the LICENSE file
    # into the .egg-info directory before running the stock processing, so
    # install and bdist_egg artifacts carry the license.
    def run(self):
        egg_info_path = os.path.join(
            PACKAGE_ROOT,
            '%s.egg-info' % PACKAGE_NAME
        )
        # The egg-info directory may not exist yet on a fresh build
        if not os.path.exists(egg_info_path):
            os.mkdir(egg_info_path)
        shutil.copy2(
            os.path.join(PACKAGE_ROOT, 'LICENSE'),
            os.path.join(egg_info_path, 'LICENSE')
        )
        # Delegate to the stock implementation
        egg_info.run(self)
class CleanCommand(Command):
    """Custom ``clean`` command: removes build artifact folders, compiled
    bytecode and ``__pycache__`` directories; ``--all`` also removes dist/."""

    user_options = [
        ('all', 'a', '(Compatibility with original clean command)'),
    ]

    def initialize_options(self):
        self.all = False

    def finalize_options(self):
        pass

    def run(self):
        # Top-level artifact folders to delete; --all adds dist/
        targets = ['build', 'temp', '%s.egg-info' % PACKAGE_NAME]
        if self.all:
            targets.append('dist')
        for target in targets:
            target_path = os.path.join(PACKAGE_ROOT, target)
            if os.path.exists(target_path):
                shutil.rmtree(target_path)
        # Walk the package itself and drop compiled bytecode
        for root, dirs, files in os.walk(os.path.join(PACKAGE_ROOT, PACKAGE_NAME)):
            for entry in files:
                if entry.endswith('.pyc'):
                    os.unlink(os.path.join(root, entry))
            for entry in list(dirs):
                if entry == '__pycache__':
                    shutil.rmtree(os.path.join(root, entry))
# The long description shown on PyPI comes straight from readme.md
readme = ''
with codecs.open(os.path.join(PACKAGE_ROOT, 'readme.md'), 'r', 'utf-8') as f:
    readme = f.read()

setup(
    name=PACKAGE_NAME,
    version=PACKAGE_VERSION,
    description=(
        'Fast ASN.1 parser and serializer with definitions for private keys, '
        'public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, '
        'PKCS#12, PKCS#5, X.509 and TSP'
    ),
    long_description=readme,
    long_description_content_type='text/markdown',
    url='https://github.com/wbond/asn1crypto',
    author='wbond',
    author_email='will@wbond.net',
    license='MIT',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Security :: Cryptography',
    ],
    keywords='asn1 crypto pki x509 certificate rsa dsa ec dh',
    packages=[PACKAGE_NAME],
    package_data=package_data,
    tests_require=tests_require,
    test_suite=test_suite,
    # Hook in the customized egg_info/clean behavior defined above
    cmdclass={
        'clean': CleanCommand,
        'egg_info': EggInfoCommand,
    }
)

View File

@ -0,0 +1,13 @@
[tox]
envlist = py26,py27,py32,py33,py34,py35,py36,py37,py38,py39,pypy
[testenv]
deps = -rrequires/ci
commands = {envpython} run.py ci
[pep8]
max-line-length = 120
[flake8]
max-line-length = 120
jobs = 1

View File

@ -0,0 +1,15 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
# Public metadata names exported by this module
__all__ = [
    "__version__",
    "__author__",
    "__copyright__",
]

# __copyright__ is derived from __author__ so the two stay in sync
__version__ = "36.0.1"
__author__ = "The Python Cryptographic Authority and individual contributors"
__copyright__ = "Copyright 2013-2021 {}".format(__author__)

View File

@ -0,0 +1,17 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography.__about__ import (
__author__,
__copyright__,
__version__,
)
# Metadata names re-exported from cryptography.__about__
__all__ = [
    "__version__",
    "__author__",
    "__copyright__",
]

View File

@ -0,0 +1,57 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography import utils
class _Reasons(utils.Enum):
    # Internal enum of distinct integer codes; presumably passed as the
    # ``reason`` argument of UnsupportedAlgorithm below — verify at call
    # sites.
    BACKEND_MISSING_INTERFACE = 0
    UNSUPPORTED_HASH = 1
    UNSUPPORTED_CIPHER = 2
    UNSUPPORTED_PADDING = 3
    UNSUPPORTED_MGF = 4
    UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5
    UNSUPPORTED_ELLIPTIC_CURVE = 6
    UNSUPPORTED_SERIALIZATION = 7
    UNSUPPORTED_X509 = 8
    UNSUPPORTED_EXCHANGE_ALGORITHM = 9
    UNSUPPORTED_DIFFIE_HELLMAN = 10
    UNSUPPORTED_MAC = 11
class UnsupportedAlgorithm(Exception):
    """An algorithm was requested that is not supported; an optional
    machine-readable ``reason`` is stored on the instance."""

    def __init__(self, message, reason=None):
        self._reason = reason
        super(UnsupportedAlgorithm, self).__init__(message)
class AlreadyFinalized(Exception):
    """The operation was attempted after finalization."""

    pass
class AlreadyUpdated(Exception):
    """The operation was attempted after an update had occurred."""

    pass
class NotYetFinalized(Exception):
    """The operation requires finalization that has not happened yet."""

    pass
class InvalidTag(Exception):
    """An authentication tag failed to validate."""

    pass
class InvalidSignature(Exception):
    """A signature failed to validate."""

    pass
class InternalError(Exception):
    """Unexpected internal failure; the accompanying numeric code is kept
    on the instance as ``err_code``."""

    def __init__(self, msg, err_code):
        self.err_code = err_code
        super(InternalError, self).__init__(msg)
class InvalidKey(Exception):
    """A key failed to validate."""

    pass

View File

@ -0,0 +1,205 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import base64
import binascii
import os
import struct
import time
import typing
from cryptography import utils
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes, padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.hmac import HMAC
class InvalidToken(Exception):
    """A Fernet token is malformed, expired, or failed authentication."""

    pass
# Maximum number of seconds a token timestamp may lie in the future relative
# to current_time before TTL-checked decryption rejects it (see
# Fernet._decrypt_data)
_MAX_CLOCK_SKEW = 60
class Fernet(object):
    """
    Fernet symmetric encryption tokens: AES-128-CBC encrypted and
    HMAC-SHA256 authenticated payloads, url-safe base64 encoded, with the
    creation timestamp embedded in the token.

    Token layout (before base64): 0x80 version byte, big-endian uint64
    timestamp, 16-byte IV, ciphertext, 32-byte HMAC trailer.
    """

    def __init__(
        self,
        key: typing.Union[bytes, str],
        backend: typing.Any = None,
    ):
        # ``backend`` is accepted but never read — presumably kept for API
        # compatibility; verify against callers.
        key = base64.urlsafe_b64decode(key)
        if len(key) != 32:
            raise ValueError(
                "Fernet key must be 32 url-safe base64-encoded bytes."
            )
        # First 16 bytes authenticate (HMAC), last 16 encrypt (AES)
        self._signing_key = key[:16]
        self._encryption_key = key[16:]

    @classmethod
    def generate_key(cls) -> bytes:
        """Return a fresh random key, url-safe base64 encoded."""
        return base64.urlsafe_b64encode(os.urandom(32))

    def encrypt(self, data: bytes) -> bytes:
        """Encrypt ``data``, stamping the token with the current time."""
        return self.encrypt_at_time(data, int(time.time()))

    def encrypt_at_time(self, data: bytes, current_time: int) -> bytes:
        """Encrypt ``data`` with an explicit timestamp (testing/backdating)."""
        iv = os.urandom(16)
        return self._encrypt_from_parts(data, current_time, iv)

    def _encrypt_from_parts(
        self, data: bytes, current_time: int, iv: bytes
    ) -> bytes:
        # Pad to the AES block size, then CBC-encrypt
        utils._check_bytes("data", data)
        padder = padding.PKCS7(algorithms.AES.block_size).padder()
        padded_data = padder.update(data) + padder.finalize()
        encryptor = Cipher(
            algorithms.AES(self._encryption_key),
            modes.CBC(iv),
        ).encryptor()
        ciphertext = encryptor.update(padded_data) + encryptor.finalize()

        # 0x80 version byte || big-endian uint64 timestamp || IV || ciphertext
        basic_parts = (
            b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext
        )

        # Authenticate everything above and append the 32-byte MAC
        h = HMAC(self._signing_key, hashes.SHA256())
        h.update(basic_parts)
        hmac = h.finalize()
        return base64.urlsafe_b64encode(basic_parts + hmac)

    def decrypt(self, token: bytes, ttl: typing.Optional[int] = None) -> bytes:
        """Decrypt ``token``; if ``ttl`` (seconds) is given, reject tokens
        older than that relative to the current time."""
        timestamp, data = Fernet._get_unverified_token_data(token)
        if ttl is None:
            time_info = None
        else:
            time_info = (ttl, int(time.time()))
        return self._decrypt_data(data, timestamp, time_info)

    def decrypt_at_time(
        self, token: bytes, ttl: int, current_time: int
    ) -> bytes:
        """Like decrypt(), but with an explicit ``current_time``; ``ttl``
        is mandatory here."""
        if ttl is None:
            raise ValueError(
                "decrypt_at_time() can only be used with a non-None ttl"
            )
        timestamp, data = Fernet._get_unverified_token_data(token)
        return self._decrypt_data(data, timestamp, (ttl, current_time))

    def extract_timestamp(self, token: bytes) -> int:
        """Return the embedded creation timestamp of an authentic token."""
        timestamp, data = Fernet._get_unverified_token_data(token)
        # Verify the token was not tampered with.
        self._verify_signature(data)
        return timestamp

    @staticmethod
    def _get_unverified_token_data(token: bytes) -> typing.Tuple[int, bytes]:
        # Decode and structurally parse the token WITHOUT checking the MAC;
        # callers must authenticate via _verify_signature before trusting it
        utils._check_bytes("token", token)
        try:
            data = base64.urlsafe_b64decode(token)
        except (TypeError, binascii.Error):
            raise InvalidToken

        # Version byte must be 0x80
        if not data or data[0] != 0x80:
            raise InvalidToken

        try:
            (timestamp,) = struct.unpack(">Q", data[1:9])
        except struct.error:
            raise InvalidToken
        return timestamp, data

    def _verify_signature(self, data: bytes) -> None:
        # Last 32 bytes are the HMAC over everything that precedes them
        h = HMAC(self._signing_key, hashes.SHA256())
        h.update(data[:-32])
        try:
            h.verify(data[-32:])
        except InvalidSignature:
            raise InvalidToken

    def _decrypt_data(
        self,
        data: bytes,
        timestamp: int,
        time_info: typing.Optional[typing.Tuple[int, int]],
    ) -> bytes:
        # TTL checks happen before the (more expensive) signature check
        if time_info is not None:
            ttl, current_time = time_info
            if timestamp + ttl < current_time:
                raise InvalidToken
            # Reject timestamps too far in the future (clock skew bound)
            if current_time + _MAX_CLOCK_SKEW < timestamp:
                raise InvalidToken

        self._verify_signature(data)

        # IV is bytes 9-24; ciphertext sits between it and the MAC trailer
        iv = data[9:25]
        ciphertext = data[25:-32]
        decryptor = Cipher(
            algorithms.AES(self._encryption_key), modes.CBC(iv)
        ).decryptor()
        plaintext_padded = decryptor.update(ciphertext)
        try:
            plaintext_padded += decryptor.finalize()
        except ValueError:
            raise InvalidToken
        unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()

        # Any padding error is reported uniformly as InvalidToken
        unpadded = unpadder.update(plaintext_padded)
        try:
            unpadded += unpadder.finalize()
        except ValueError:
            raise InvalidToken
        return unpadded
class MultiFernet(object):
    """
    Wraps several Fernet instances for key rotation: tokens are always
    produced with the first key, but decryption and rotation try every
    key in order.
    """

    def __init__(self, fernets: typing.Iterable[Fernet]):
        keys = list(fernets)
        if not keys:
            raise ValueError(
                "MultiFernet requires at least one Fernet instance"
            )
        self._fernets = keys

    def encrypt(self, msg: bytes) -> bytes:
        return self.encrypt_at_time(msg, int(time.time()))

    def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes:
        # New tokens are always produced with the primary (first) key
        return self._fernets[0].encrypt_at_time(msg, current_time)

    def rotate(self, msg: bytes) -> bytes:
        # Decrypt with whichever key works, then re-encrypt with the
        # primary key while preserving the original timestamp
        timestamp, data = Fernet._get_unverified_token_data(msg)
        plaintext = None
        for candidate in self._fernets:
            try:
                plaintext = candidate._decrypt_data(data, timestamp, None)
            except InvalidToken:
                continue
            break
        if plaintext is None:
            raise InvalidToken

        iv = os.urandom(16)
        return self._fernets[0]._encrypt_from_parts(plaintext, timestamp, iv)

    def decrypt(self, msg: bytes, ttl: typing.Optional[int] = None) -> bytes:
        for candidate in self._fernets:
            try:
                return candidate.decrypt(msg, ttl)
            except InvalidToken:
                pass
        raise InvalidToken

    def decrypt_at_time(
        self, msg: bytes, ttl: int, current_time: int
    ) -> bytes:
        for candidate in self._fernets:
            try:
                return candidate.decrypt_at_time(msg, ttl, current_time)
            except InvalidToken:
                pass
        raise InvalidToken

View File

@ -0,0 +1,10 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
"""
Hazardous Materials
This is a "Hazardous Materials" module. You should ONLY use it if you're
100% absolutely sure that you know what you're doing because this module
is full of land mines, dragons, and dinosaurs with laser guns.
"""

View File

@ -0,0 +1,341 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.hazmat.primitives import hashes
class ObjectIdentifier(object):
    """A validated dotted-string ASN.1 object identifier.

    Equality and hashing are based solely on the dotted string.
    """

    def __init__(self, dotted_string: str) -> None:
        self._dotted_string = dotted_string

        # There must be at least 2 nodes, the first node must be 0..2, and
        # if less than 2, the second node cannot have a value outside the
        # range 0..39. All nodes must be integers.
        parts = []
        for piece in self._dotted_string.split("."):
            try:
                value = int(piece, 10)
            except ValueError:
                raise ValueError(
                    "Malformed OID: %s (non-integer nodes)"
                    % (self._dotted_string)
                )
            if value < 0:
                raise ValueError(
                    "Malformed OID: %s (negative-integer nodes)"
                    % (self._dotted_string)
                )
            parts.append(value)

        if len(parts) < 2:
            raise ValueError(
                "Malformed OID: %s (insufficient number of nodes)"
                % (self._dotted_string)
            )
        if parts[0] > 2:
            raise ValueError(
                "Malformed OID: %s (first node outside valid range)"
                % (self._dotted_string)
            )
        if parts[0] < 2 and parts[1] >= 40:
            raise ValueError(
                "Malformed OID: %s (second node outside valid range)"
                % (self._dotted_string)
            )

    def __eq__(self, other: typing.Any) -> bool:
        if not isinstance(other, ObjectIdentifier):
            return NotImplemented
        return self.dotted_string == other.dotted_string

    def __ne__(self, other: typing.Any) -> bool:
        return not (self == other)

    def __hash__(self) -> int:
        return hash(self.dotted_string)

    def __repr__(self) -> str:
        return "<ObjectIdentifier(oid={}, name={})>".format(
            self.dotted_string, self._name
        )

    @property
    def _name(self) -> str:
        # Friendly-name lookup; the table is defined elsewhere in this module
        return _OID_NAMES.get(self, "Unknown OID")

    @property
    def dotted_string(self) -> str:
        return self._dotted_string
class ExtensionOID(object):
    """OIDs identifying X.509 certificate and CRL extensions."""

    SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
    SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
    KEY_USAGE = ObjectIdentifier("2.5.29.15")
    SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
    ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
    BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
    NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30")
    CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31")
    CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32")
    POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33")
    AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35")
    POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36")
    EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
    FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
    INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
    ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28")
    AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
    SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
    OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
    TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24")
    CRL_NUMBER = ObjectIdentifier("2.5.29.20")
    DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27")
    # Certificate Transparency extensions (Google arc 1.3.6.1.4.1.11129).
    PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier(
        "1.3.6.1.4.1.11129.2.4.2"
    )
    PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")
    SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5")
class OCSPExtensionOID(object):
    """OIDs for OCSP request/response extensions."""

    NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2")
class CRLEntryExtensionOID(object):
    """OIDs for extensions carried on individual CRL entries."""

    CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29")
    CRL_REASON = ObjectIdentifier("2.5.29.21")
    INVALIDITY_DATE = ObjectIdentifier("2.5.29.24")
class NameOID(object):
    """OIDs for attribute types used in X.509 distinguished names."""

    COMMON_NAME = ObjectIdentifier("2.5.4.3")
    COUNTRY_NAME = ObjectIdentifier("2.5.4.6")
    LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
    STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
    STREET_ADDRESS = ObjectIdentifier("2.5.4.9")
    ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
    ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
    SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
    SURNAME = ObjectIdentifier("2.5.4.4")
    GIVEN_NAME = ObjectIdentifier("2.5.4.42")
    TITLE = ObjectIdentifier("2.5.4.12")
    GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44")
    X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45")
    DN_QUALIFIER = ObjectIdentifier("2.5.4.46")
    PSEUDONYM = ObjectIdentifier("2.5.4.65")
    USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1")
    DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25")
    EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1")
    # EV (extended validation) jurisdiction attributes (Microsoft arc).
    JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3")
    JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1")
    JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier(
        "1.3.6.1.4.1.311.60.2.1.2"
    )
    BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15")
    POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16")
    POSTAL_CODE = ObjectIdentifier("2.5.4.17")
    # Russian national identifiers (arc 1.2.643).
    INN = ObjectIdentifier("1.2.643.3.131.1.1")
    OGRN = ObjectIdentifier("1.2.643.100.1")
    SNILS = ObjectIdentifier("1.2.643.100.3")
    UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
class SignatureAlgorithmOID(object):
    """OIDs naming certificate signature algorithms (RSA, ECDSA, DSA,
    EdDSA, and GOST variants)."""

    RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4")
    RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5")
    # This is an alternate OID for RSA with SHA1 that is occasionally seen
    _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29")
    RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14")
    RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11")
    RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12")
    RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13")
    RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10")
    ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1")
    ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1")
    ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2")
    ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3")
    ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4")
    DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
    DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
    DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
    DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3")
    DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4")
    ED25519 = ObjectIdentifier("1.3.101.112")
    ED448 = ObjectIdentifier("1.3.101.113")
    GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3")
    GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2")
    GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3")
# Maps a signature-algorithm OID to the hash algorithm that scheme uses.
# ``None`` marks algorithms whose digest is intrinsic to the scheme
# (Ed25519/Ed448) or not modeled here (the GOST algorithms).
_SIG_OIDS_TO_HASH: typing.Dict[
    ObjectIdentifier, typing.Optional[hashes.HashAlgorithm]
] = {
    SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(),
    SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(),
    SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(),
    SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(),
    SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(),
    SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(),
    SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(),
    SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(),
    SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(),
    SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(),
    SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(),
    SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(),
    SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(),
    SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(),
    SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(),
    # Fix: these two OIDs are declared on SignatureAlgorithmOID above but
    # were missing from this map, so hash lookup for DSA-with-SHA384/512
    # signatures failed even though the OIDs were recognized.
    SignatureAlgorithmOID.DSA_WITH_SHA384: hashes.SHA384(),
    SignatureAlgorithmOID.DSA_WITH_SHA512: hashes.SHA512(),
    SignatureAlgorithmOID.ED25519: None,
    SignatureAlgorithmOID.ED448: None,
    SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None,
    SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None,
    SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None,
}
class ExtendedKeyUsageOID(object):
    """OIDs naming extended key usage purposes for certificates."""

    SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
    CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
    CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3")
    EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4")
    TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8")
    OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9")
    ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0")
    SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2")
    KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5")
class AuthorityInformationAccessOID(object):
    """Access-method OIDs used in the Authority Information Access
    extension."""

    CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
    OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
class SubjectInformationAccessOID(object):
    """Access-method OIDs used in the Subject Information Access
    extension."""

    CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5")
class CertificatePoliciesOID(object):
    """OIDs used within the certificate policies extension (policy
    qualifiers and the anyPolicy identifier)."""

    CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1")
    CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2")
    ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
class AttributeOID(object):
    """OIDs for certification-request attributes such as the challenge
    password."""

    CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7")
    UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
# Human-readable names for known OIDs, used by ObjectIdentifier._name.
# Keys are ObjectIdentifier instances (hashed by dotted string), so OIDs
# shared between classes collapse onto one entry.
_OID_NAMES = {
    NameOID.COMMON_NAME: "commonName",
    NameOID.COUNTRY_NAME: "countryName",
    NameOID.LOCALITY_NAME: "localityName",
    NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName",
    NameOID.STREET_ADDRESS: "streetAddress",
    NameOID.ORGANIZATION_NAME: "organizationName",
    NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName",
    NameOID.SERIAL_NUMBER: "serialNumber",
    NameOID.SURNAME: "surname",
    NameOID.GIVEN_NAME: "givenName",
    NameOID.TITLE: "title",
    NameOID.GENERATION_QUALIFIER: "generationQualifier",
    NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier",
    NameOID.DN_QUALIFIER: "dnQualifier",
    NameOID.PSEUDONYM: "pseudonym",
    NameOID.USER_ID: "userID",
    NameOID.DOMAIN_COMPONENT: "domainComponent",
    NameOID.EMAIL_ADDRESS: "emailAddress",
    NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName",
    NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName",
    NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: (
        "jurisdictionStateOrProvinceName"
    ),
    NameOID.BUSINESS_CATEGORY: "businessCategory",
    NameOID.POSTAL_ADDRESS: "postalAddress",
    NameOID.POSTAL_CODE: "postalCode",
    NameOID.INN: "INN",
    NameOID.OGRN: "OGRN",
    NameOID.SNILS: "SNILS",
    NameOID.UNSTRUCTURED_NAME: "unstructuredName",
    SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption",
    SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption",
    SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption",
    SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
    SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
    SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
    SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS",
    SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
    SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
    SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
    SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384",
    SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512",
    SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1",
    SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224",
    SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256",
    # Fix: names for the DSA SHA-384/512 OIDs declared above were missing,
    # so ObjectIdentifier._name reported "Unknown OID" for them.
    SignatureAlgorithmOID.DSA_WITH_SHA384: "dsa-with-sha384",
    SignatureAlgorithmOID.DSA_WITH_SHA512: "dsa-with-sha512",
    SignatureAlgorithmOID.ED25519: "ed25519",
    SignatureAlgorithmOID.ED448: "ed448",
    SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: (
        "GOST R 34.11-94 with GOST R 34.10-2001"
    ),
    SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: (
        "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)"
    ),
    SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: (
        "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)"
    ),
    ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
    ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
    ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
    ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection",
    ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping",
    ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning",
    ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin",
    ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC",
    ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes",
    ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier",
    ExtensionOID.KEY_USAGE: "keyUsage",
    ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
    ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
    ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
    ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
        "signedCertificateTimestampList"
    ),
    ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: (
        "signedCertificateTimestampList"
    ),
    ExtensionOID.PRECERT_POISON: "ctPoison",
    CRLEntryExtensionOID.CRL_REASON: "cRLReason",
    CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
    CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
    ExtensionOID.NAME_CONSTRAINTS: "nameConstraints",
    ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints",
    ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies",
    ExtensionOID.POLICY_MAPPINGS: "policyMappings",
    ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier",
    ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints",
    ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
    ExtensionOID.FRESHEST_CRL: "freshestCRL",
    ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
    ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint",
    ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
    ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
    ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",
    ExtensionOID.CRL_NUMBER: "cRLNumber",
    ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator",
    ExtensionOID.TLS_FEATURE: "TLSFeature",
    AuthorityInformationAccessOID.OCSP: "OCSP",
    AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers",
    SubjectInformationAccessOID.CA_REPOSITORY: "caRepository",
    CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps",
    CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice",
    OCSPExtensionOID.NONCE: "OCSPNonce",
    AttributeOID.CHALLENGE_PASSWORD: "challengePassword",
}

View File

@ -0,0 +1,11 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography.hazmat.backends.interfaces import Backend
def default_backend() -> Backend:
    """Return the default backend instance (OpenSSL).

    The import is function-local, so the OpenSSL bindings are only loaded
    the first time this is called.
    """
    from cryptography.hazmat.backends.openssl.backend import (
        backend as openssl_backend,
    )

    return openssl_backend

View File

@ -0,0 +1,384 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import abc
class CipherBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide symmetric ciphers."""

    @abc.abstractmethod
    def cipher_supported(self, cipher, mode):
        """
        Return True if the given cipher and mode are supported.
        """

    @abc.abstractmethod
    def create_symmetric_encryption_ctx(self, cipher, mode):
        """
        Get a CipherContext that can be used for encryption.
        """

    @abc.abstractmethod
    def create_symmetric_decryption_ctx(self, cipher, mode):
        """
        Get a CipherContext that can be used for decryption.
        """
class HashBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide message digests."""

    @abc.abstractmethod
    def hash_supported(self, algorithm):
        """
        Return True if the hash algorithm is supported by this backend.
        """

    @abc.abstractmethod
    def create_hash_ctx(self, algorithm):
        """
        Create a HashContext for calculating a message digest.
        """
class HMACBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide HMAC."""

    @abc.abstractmethod
    def hmac_supported(self, algorithm):
        """
        Return True if the hash algorithm is supported for HMAC by this
        backend.
        """

    @abc.abstractmethod
    def create_hmac_ctx(self, key, algorithm):
        """
        Create a context for calculating a message authentication code.
        """
class CMACBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide CMAC."""

    @abc.abstractmethod
    def cmac_algorithm_supported(self, algorithm):
        """
        Returns True if the block cipher is supported for CMAC by this backend
        """

    @abc.abstractmethod
    def create_cmac_ctx(self, algorithm):
        """
        Create a context for calculating a message authentication code.
        """
class PBKDF2HMACBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide PBKDF2-HMAC key
    derivation."""

    @abc.abstractmethod
    def pbkdf2_hmac_supported(self, algorithm):
        """
        Return True if the hash algorithm is supported for PBKDF2 by this
        backend.
        """

    @abc.abstractmethod
    def derive_pbkdf2_hmac(
        self, algorithm, length, salt, iterations, key_material
    ):
        """
        Return length bytes derived from provided PBKDF2 parameters.
        """
class RSABackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide RSA key generation
    and loading."""

    @abc.abstractmethod
    def generate_rsa_private_key(self, public_exponent, key_size):
        """
        Generate an RSAPrivateKey instance with public_exponent and a modulus
        of key_size bits.
        """

    @abc.abstractmethod
    def rsa_padding_supported(self, padding):
        """
        Returns True if the backend supports the given padding options.
        """

    @abc.abstractmethod
    def generate_rsa_parameters_supported(self, public_exponent, key_size):
        """
        Returns True if the backend supports the given parameters for key
        generation.
        """

    @abc.abstractmethod
    def load_rsa_private_numbers(self, numbers):
        """
        Returns an RSAPrivateKey provider.
        """

    @abc.abstractmethod
    def load_rsa_public_numbers(self, numbers):
        """
        Returns an RSAPublicKey provider.
        """
class DSABackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide DSA key and
    parameter handling."""

    @abc.abstractmethod
    def generate_dsa_parameters(self, key_size):
        """
        Generate a DSAParameters instance with a modulus of key_size bits.
        """

    @abc.abstractmethod
    def generate_dsa_private_key(self, parameters):
        """
        Generate a DSAPrivateKey instance with parameters as a DSAParameters
        object.
        """

    @abc.abstractmethod
    def generate_dsa_private_key_and_parameters(self, key_size):
        """
        Generate a DSAPrivateKey instance using key size only.
        """

    @abc.abstractmethod
    def dsa_hash_supported(self, algorithm):
        """
        Return True if the hash algorithm is supported by the backend for DSA.
        """

    @abc.abstractmethod
    def dsa_parameters_supported(self, p, q, g):
        """
        Return True if the parameters are supported by the backend for DSA.
        """

    @abc.abstractmethod
    def load_dsa_private_numbers(self, numbers):
        """
        Returns a DSAPrivateKey provider.
        """

    @abc.abstractmethod
    def load_dsa_public_numbers(self, numbers):
        """
        Returns a DSAPublicKey provider.
        """

    @abc.abstractmethod
    def load_dsa_parameter_numbers(self, numbers):
        """
        Returns a DSAParameters provider.
        """
class EllipticCurveBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide elliptic-curve
    cryptography."""

    @abc.abstractmethod
    def elliptic_curve_signature_algorithm_supported(
        self, signature_algorithm, curve
    ):
        """
        Returns True if the backend supports the named elliptic curve with the
        specified signature algorithm.
        """

    @abc.abstractmethod
    def elliptic_curve_supported(self, curve):
        """
        Returns True if the backend supports the named elliptic curve.
        """

    @abc.abstractmethod
    def generate_elliptic_curve_private_key(self, curve):
        """
        Return an object conforming to the EllipticCurvePrivateKey interface.
        """

    @abc.abstractmethod
    def load_elliptic_curve_public_numbers(self, numbers):
        """
        Return an EllipticCurvePublicKey provider using the given numbers.
        """

    @abc.abstractmethod
    def load_elliptic_curve_private_numbers(self, numbers):
        """
        Return an EllipticCurvePrivateKey provider using the given numbers.
        """

    @abc.abstractmethod
    def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
        """
        Returns whether the exchange algorithm is supported by this backend.
        """

    @abc.abstractmethod
    def derive_elliptic_curve_private_key(self, private_value, curve):
        """
        Compute the private key given the private value and curve.
        """
class PEMSerializationBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that load PEM-encoded keys and
    parameters."""

    @abc.abstractmethod
    def load_pem_private_key(self, data, password):
        """
        Loads a private key from PEM encoded data, using the provided password
        if the data is encrypted.
        """

    @abc.abstractmethod
    def load_pem_public_key(self, data):
        """
        Loads a public key from PEM encoded data.
        """

    @abc.abstractmethod
    def load_pem_parameters(self, data):
        """
        Load encryption parameters from PEM encoded data.
        """
class DERSerializationBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that load DER-encoded keys and
    parameters."""

    @abc.abstractmethod
    def load_der_private_key(self, data, password):
        """
        Loads a private key from DER encoded data. Uses the provided password
        if the data is encrypted.
        """

    @abc.abstractmethod
    def load_der_public_key(self, data):
        """
        Loads a public key from DER encoded data.
        """

    @abc.abstractmethod
    def load_der_parameters(self, data):
        """
        Load encryption parameters from DER encoded data.
        """
class DHBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide Diffie-Hellman key
    exchange."""

    @abc.abstractmethod
    def generate_dh_parameters(self, generator, key_size):
        """
        Generate a DHParameters instance with a modulus of key_size bits.
        Using the given generator. Often 2 or 5.
        """

    @abc.abstractmethod
    def generate_dh_private_key(self, parameters):
        """
        Generate a DHPrivateKey instance with parameters as a DHParameters
        object.
        """

    @abc.abstractmethod
    def generate_dh_private_key_and_parameters(self, generator, key_size):
        """
        Generate a DHPrivateKey instance using key size only.
        Using the given generator. Often 2 or 5.
        """

    @abc.abstractmethod
    def load_dh_private_numbers(self, numbers):
        """
        Load a DHPrivateKey from DHPrivateNumbers
        """

    @abc.abstractmethod
    def load_dh_public_numbers(self, numbers):
        """
        Load a DHPublicKey from DHPublicNumbers.
        """

    @abc.abstractmethod
    def load_dh_parameter_numbers(self, numbers):
        """
        Load DHParameters from DHParameterNumbers.
        """

    @abc.abstractmethod
    def dh_parameters_supported(self, p, g, q=None):
        """
        Returns whether the backend supports DH with these parameter values.
        """

    @abc.abstractmethod
    def dh_x942_serialization_supported(self):
        """
        Returns True if the backend supports the serialization of DH objects
        with subgroup order (q).
        """
class ScryptBackend(metaclass=abc.ABCMeta):
    """Abstract interface for backends that provide scrypt key
    derivation."""

    @abc.abstractmethod
    def derive_scrypt(self, key_material, salt, length, n, r, p):
        """
        Return bytes derived from provided Scrypt parameters.
        """

    @abc.abstractmethod
    def scrypt_supported(self):
        """
        Return True if Scrypt is supported.
        """
# This is the catch-all for future backend methods and inherits all the
# other interfaces as well so we can just use Backend for typing.
class Backend(
    CipherBackend,
    CMACBackend,
    DERSerializationBackend,
    DHBackend,
    DSABackend,
    EllipticCurveBackend,
    HashBackend,
    HMACBackend,
    PBKDF2HMACBackend,
    RSABackend,
    PEMSerializationBackend,
    ScryptBackend,
    metaclass=abc.ABCMeta,
):
    """Catch-all backend interface.

    Inherits every capability interface above and adds the PKCS7/PKCS12
    methods, so ``Backend`` can be used as the single type for backend
    annotations.
    """

    @abc.abstractmethod
    def load_pem_pkcs7_certificates(self, data):
        """
        Returns a list of x509.Certificate
        """

    @abc.abstractmethod
    def load_der_pkcs7_certificates(self, data):
        """
        Returns a list of x509.Certificate
        """

    @abc.abstractmethod
    def pkcs7_sign(self, builder, encoding, options):
        """
        Returns bytes
        """

    @abc.abstractmethod
    def load_key_and_certificates_from_pkcs12(self, data, password):
        """
        Returns a tuple of (key, cert, [certs])
        """

    @abc.abstractmethod
    def load_pkcs12(self, data, password):
        """
        Returns a PKCS12KeyAndCertificates object
        """

    @abc.abstractmethod
    def serialize_key_and_certificates_to_pkcs12(
        self, name, key, cert, cas, encryption_algorithm
    ):
        """
        Returns bytes
        """

View File

@ -0,0 +1,9 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography.hazmat.backends.openssl.backend import backend
__all__ = ["backend"]

Some files were not shown because too many files have changed in this diff Show More