Pushed by: admin License: 61BF9B31-726 (Enterprise) Timestamp: 2026-02-07T22:11:49.640314masterdev
parent
d57a8a800f
commit
a69088da24
@ -0,0 +1,4 @@
|
|||||||
|
[base]
|
||||||
|
builder_version = <string>
|
||||||
|
builder_build = <string>
|
||||||
|
is_edited = <bool>
|
||||||
@ -0,0 +1,3 @@
|
|||||||
|
[socradar_incidents_collector://<name>]
|
||||||
|
socradar_api_key =
|
||||||
|
socradar_company_id =
|
||||||
@ -0,0 +1,14 @@
|
|||||||
|
[proxy]
|
||||||
|
proxy_enabled =
|
||||||
|
proxy_type =
|
||||||
|
proxy_url =
|
||||||
|
proxy_port =
|
||||||
|
proxy_username =
|
||||||
|
proxy_password =
|
||||||
|
proxy_rdns =
|
||||||
|
|
||||||
|
[logging]
|
||||||
|
loglevel =
|
||||||
|
|
||||||
|
[additional_parameters]
|
||||||
|
how_many_days =
|
||||||
File diff suppressed because one or more lines are too long
@ -0,0 +1 @@
|
|||||||
|
{}
|
||||||
@ -0,0 +1,59 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": "2.0.0",
|
||||||
|
"info": {
|
||||||
|
"title": "SOCRadar Incidents",
|
||||||
|
"id": {
|
||||||
|
"group": null,
|
||||||
|
"name": "TA-socradar-incidents",
|
||||||
|
"version": "1.9.22"
|
||||||
|
},
|
||||||
|
"author": [
|
||||||
|
{
|
||||||
|
"name": "Burak Göger",
|
||||||
|
"email": null,
|
||||||
|
"company": null
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"releaseDate": null,
|
||||||
|
"description": "SOCRadar Incident Collector",
|
||||||
|
"classification": {
|
||||||
|
"intendedAudience": null,
|
||||||
|
"categories": [],
|
||||||
|
"developmentStatus": null
|
||||||
|
},
|
||||||
|
"commonInformationModels": null,
|
||||||
|
"license": {
|
||||||
|
"name": null,
|
||||||
|
"text": null,
|
||||||
|
"uri": null
|
||||||
|
},
|
||||||
|
"privacyPolicy": {
|
||||||
|
"name": null,
|
||||||
|
"text": null,
|
||||||
|
"uri": null
|
||||||
|
},
|
||||||
|
"releaseNotes": {
|
||||||
|
"name": null,
|
||||||
|
"text": null,
|
||||||
|
"uri": null
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
},
|
||||||
|
"tasks": [],
|
||||||
|
"inputGroups": {
|
||||||
|
},
|
||||||
|
"incompatibleApps": {
|
||||||
|
},
|
||||||
|
"platformRequirements": {
|
||||||
|
"splunk": {
|
||||||
|
"Enterprise": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"supportedDeployments": [
|
||||||
|
"*"
|
||||||
|
],
|
||||||
|
"targetWorkloads": [
|
||||||
|
"*"
|
||||||
|
]
|
||||||
|
}
|
||||||
@ -0,0 +1,93 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
import cherrypy
|
||||||
|
import json
|
||||||
|
import requests
|
||||||
|
import splunk.appserver.mrsparkle.controllers as controllers
|
||||||
|
from splunk.appserver.mrsparkle.lib.decorators import expose_page
|
||||||
|
from splunk.appserver.mrsparkle.lib.routes import route
|
||||||
|
import splunk.entity as entity
|
||||||
|
|
||||||
|
class UpdateStatusController(controllers.BaseController):
    """REST controller that forwards SOCRadar alarm status changes.

    Exposes a single POST endpoint (/:app/update_status) that reads a JSON
    body, resolves the SOCRadar API credentials from the add-on's
    configuration, and calls the SOCRadar "alarms/status/change" API.
    """

    @expose_page(must_login=True, methods=['POST'])
    @route('/:app/:action=update_status')
    def update_status(self, app, action, **kwargs):
        """Handle a POST request that changes a SOCRadar alarm's status.

        Expected JSON body keys:
            alarm_id -- identifier(s) of the alarm(s) to update
            status   -- new status value
            comments -- optional comment (defaults to 'Updated via Splunk')

        Returns:
            A JSON string of the form {"success": bool, "message": str}.
            All failures are reported through this envelope rather than
            propagating an exception to CherryPy.
        """
        try:
            # Read and parse the raw POST body.
            content_length = int(cherrypy.request.headers.get('Content-Length', 0))
            raw_body = cherrypy.request.body.read(content_length)
            data = json.loads(raw_body)

            alarm_id = data.get('alarm_id')
            status = data.get('status')
            comments = data.get('comments', 'Updated via Splunk')

            # Fail fast on missing request fields instead of sending a
            # malformed payload (None values) to the SOCRadar API.
            if alarm_id is None or not status:
                raise ValueError("Both 'alarm_id' and 'status' are required")

            # Resolve SOCRadar credentials for the current session.
            session_key = cherrypy.session.get('sessionKey')
            settings = self.get_socradar_settings(session_key)

            if not settings.get('company_id') or not settings.get('api_key'):
                raise Exception("SOCRadar API credentials not configured")

            # Call the SOCRadar status-change endpoint.
            url = f"https://platform.socradar.com/api/company/{settings['company_id']}/alarms/status/change"

            payload = {
                "status": status,
                "alarm_ids": alarm_id,
                "comments": comments
            }
            params = {
                "key": settings['api_key']
            }

            response = requests.post(url, json=payload, params=params, timeout=30)

            if response.status_code == 200:
                return json.dumps({"success": True, "message": "Status updated successfully"})
            return json.dumps({"success": False, "message": f"API error: {response.status_code}"})

        except Exception as e:
            # Surface any failure (bad JSON, missing config, network error)
            # to the caller as a JSON error response.
            return json.dumps({"success": False, "message": str(e)})

    def get_socradar_settings(self, session_key):
        """Return SOCRadar credentials from the add-on configuration.

        Tries the Splunk REST settings endpoint first; on any failure it
        falls back to reading the local .conf file directly from disk.

        Returns:
            dict with 'company_id' and 'api_key' keys (values may be None),
            or an empty dict when no configuration can be found.
        """
        try:
            settings_entity = entity.getEntity(
                'ta_socradar_incidents_settings',
                'additional_parameters',
                namespace='TA-socradar-incidents',
                sessionKey=session_key,
                owner='nobody'
            )

            # NOTE(review): this reads 'socradar_company_api_key', while the
            # input stanza spec declares 'socradar_api_key' -- confirm which
            # key name the settings conf actually stores.
            return {
                'company_id': settings_entity.get('socradar_company_id'),
                'api_key': settings_entity.get('socradar_company_api_key')
            }
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are no longer swallowed. Fallback: read the conf file directly.
            import os
            import configparser

            conf_path = os.path.join(
                os.environ.get('SPLUNK_HOME', '/opt/splunk'),
                'etc', 'apps', 'TA-socradar-incidents', 'local',
                'ta_socradar_incidents_settings.conf'
            )

            if os.path.exists(conf_path):
                config = configparser.ConfigParser()
                config.read(conf_path)

                if 'additional_parameters' in config:
                    return {
                        'company_id': config['additional_parameters'].get('socradar_company_id'),
                        'api_key': config['additional_parameters'].get('socradar_company_api_key')
                    }

            return {}
|
||||||
@ -0,0 +1,398 @@
|
|||||||
|
# Splunk Dashboard JavaScript MVC Guide
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
This guide explains how to properly implement JavaScript in Splunk dashboards to trigger searches and handle token values.
|
||||||
|
|
||||||
|
## 1. Basic Structure for Dashboard JavaScript
|
||||||
|
|
||||||
|
### Required Libraries
|
||||||
|
```javascript
|
||||||
|
require([
|
||||||
|
'underscore',
|
||||||
|
'jquery',
|
||||||
|
'splunkjs/mvc',
|
||||||
|
'splunkjs/mvc/searchmanager',
|
||||||
|
'splunkjs/mvc/simplexml/ready!'
|
||||||
|
], function(
|
||||||
|
_,
|
||||||
|
$,
|
||||||
|
mvc,
|
||||||
|
SearchManager
|
||||||
|
) {
|
||||||
|
// Your code here
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Important: Wait for Dashboard Ready
|
||||||
|
The `splunkjs/mvc/simplexml/ready!` module ensures the dashboard is fully loaded before your code runs.
|
||||||
|
|
||||||
|
## 2. Accessing MVC Components
|
||||||
|
|
||||||
|
### Get Dashboard Tokens
|
||||||
|
```javascript
|
||||||
|
// Get default token model
|
||||||
|
var tokens = mvc.Components.get("default");
|
||||||
|
|
||||||
|
// Get submitted token model
|
||||||
|
var submittedTokens = mvc.Components.get("submitted");
|
||||||
|
|
||||||
|
// Get a specific token value
|
||||||
|
var tokenValue = tokens.get("mytoken");
|
||||||
|
|
||||||
|
// Set a token value
|
||||||
|
tokens.set("mytoken", "new_value");
|
||||||
|
```
|
||||||
|
|
||||||
|
### Get Dashboard Elements
|
||||||
|
```javascript
|
||||||
|
// Get a search manager by ID
|
||||||
|
var searchManager = mvc.Components.get("my_search");
|
||||||
|
|
||||||
|
// Get a visualization by ID
|
||||||
|
var chart = mvc.Components.get("my_chart");
|
||||||
|
|
||||||
|
// Get an input by ID
|
||||||
|
var dropdown = mvc.Components.get("my_dropdown");
|
||||||
|
```
|
||||||
|
|
||||||
|
## 3. Creating and Executing Searches Programmatically
|
||||||
|
|
||||||
|
### Create a New Search
|
||||||
|
```javascript
|
||||||
|
var mySearch = new SearchManager({
|
||||||
|
id: "my_programmatic_search",
|
||||||
|
search: "index=socradar_incidents | stats count",
|
||||||
|
earliest_time: "-24h@h",
|
||||||
|
latest_time: "now",
|
||||||
|
preview: true,
|
||||||
|
cache: false,
|
||||||
|
autostart: false // Don't start automatically
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start the search
|
||||||
|
mySearch.startSearch();
|
||||||
|
```
|
||||||
|
|
||||||
|
### Create a Search with Token Values
|
||||||
|
```javascript
|
||||||
|
// Using mvc.tokenSafe for token-aware searches
|
||||||
|
var mySearch = new SearchManager({
|
||||||
|
id: "update_status_search",
|
||||||
|
search: mvc.tokenSafe("| sendalert update_socradar_incident_status param.incident_id=$incident_id$ param.new_status=$new_status$"),
|
||||||
|
earliest_time: "-1m",
|
||||||
|
latest_time: "now",
|
||||||
|
autostart: false
|
||||||
|
});
|
||||||
|
|
||||||
|
// Token values will be automatically resolved when the search runs
|
||||||
|
```
|
||||||
|
|
||||||
|
## 4. Handling Search Results
|
||||||
|
|
||||||
|
### Listen for Search Events
|
||||||
|
```javascript
|
||||||
|
mySearch.on('search:done', function(properties) {
|
||||||
|
console.log("Search completed!");
|
||||||
|
|
||||||
|
// Get the results
|
||||||
|
var results = mySearch.data("results");
|
||||||
|
|
||||||
|
results.on("data", function() {
|
||||||
|
var data = results.data();
|
||||||
|
console.log("Results:", data);
|
||||||
|
|
||||||
|
// Process results
|
||||||
|
if (data && data.rows && data.rows.length > 0) {
|
||||||
|
// Handle successful results
|
||||||
|
alert("Status updated successfully!");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
mySearch.on('search:error', function(properties) {
|
||||||
|
console.error("Search error:", properties.content.messages);
|
||||||
|
});
|
||||||
|
|
||||||
|
mySearch.on('search:fail', function(properties) {
|
||||||
|
console.error("Search failed:", properties.content.messages);
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## 5. Complete Example: Button Click Handler
|
||||||
|
|
||||||
|
### HTML Button (in dashboard XML)
|
||||||
|
```xml
|
||||||
|
<html>
|
||||||
|
<button id="update_status_btn" class="btn btn-primary">Update Status</button>
|
||||||
|
</html>
|
||||||
|
```
|
||||||
|
|
||||||
|
### JavaScript Implementation
|
||||||
|
```javascript
|
||||||
|
require([
|
||||||
|
'underscore',
|
||||||
|
'jquery',
|
||||||
|
'splunkjs/mvc',
|
||||||
|
'splunkjs/mvc/searchmanager',
|
||||||
|
'splunkjs/mvc/simplexml/ready!'
|
||||||
|
], function(
|
||||||
|
_,
|
||||||
|
$,
|
||||||
|
mvc,
|
||||||
|
SearchManager
|
||||||
|
) {
|
||||||
|
|
||||||
|
// Get token models
|
||||||
|
var tokens = mvc.Components.get("default");
|
||||||
|
var submittedTokens = mvc.Components.get("submitted");
|
||||||
|
|
||||||
|
// Button click handler
|
||||||
|
$("#update_status_btn").on("click", function() {
|
||||||
|
|
||||||
|
// Get token values
|
||||||
|
var incidentId = tokens.get("incident_id");
|
||||||
|
var newStatus = tokens.get("new_status");
|
||||||
|
|
||||||
|
// Validate inputs
|
||||||
|
if (!incidentId || !newStatus) {
|
||||||
|
alert("Please select an incident and status");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create search to update status
|
||||||
|
var updateSearch = new SearchManager({
|
||||||
|
id: "update_status_search_" + Date.now(), // Unique ID
|
||||||
|
search: "| sendalert update_socradar_incident_status param.incident_id=\"" + incidentId + "\" param.new_status=\"" + newStatus + "\"",
|
||||||
|
earliest_time: "-1m",
|
||||||
|
latest_time: "now",
|
||||||
|
autostart: false
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle search completion
|
||||||
|
updateSearch.on('search:done', function(properties) {
|
||||||
|
console.log("Status update completed");
|
||||||
|
|
||||||
|
// Refresh the main search to show updated data
|
||||||
|
var mainSearch = mvc.Components.get("incident_details_search");
|
||||||
|
if (mainSearch) {
|
||||||
|
mainSearch.startSearch();
|
||||||
|
}
|
||||||
|
|
||||||
|
alert("Status updated successfully!");
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle search errors
|
||||||
|
updateSearch.on('search:error search:fail', function(properties) {
|
||||||
|
console.error("Status update failed:", properties);
|
||||||
|
alert("Failed to update status. Please try again.");
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start the search
|
||||||
|
updateSearch.startSearch();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## 6. Best Practices
|
||||||
|
|
||||||
|
### 1. Always Wait for Dashboard Ready
|
||||||
|
```javascript
|
||||||
|
require(['splunkjs/mvc/simplexml/ready!'], function() {
|
||||||
|
// Dashboard is ready
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Use Token-Safe Strings
|
||||||
|
```javascript
|
||||||
|
// Good - tokens will be resolved
|
||||||
|
search: mvc.tokenSafe("index=main sourcetype=$sourcetype$")
|
||||||
|
|
||||||
|
// Bad - tokens won't be resolved
|
||||||
|
search: "index=main sourcetype=$sourcetype$"
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Unique Search Manager IDs
|
||||||
|
```javascript
|
||||||
|
// Add timestamp to ensure uniqueness
|
||||||
|
id: "my_search_" + Date.now()
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Clean Up Search Managers
|
||||||
|
```javascript
|
||||||
|
// Dispose of search when done
|
||||||
|
updateSearch.on('search:done', function() {
|
||||||
|
updateSearch.finalize();
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. Handle Token Changes
|
||||||
|
```javascript
|
||||||
|
// Listen for token changes
|
||||||
|
tokens.on("change:mytoken", function(model, value) {
|
||||||
|
console.log("Token changed:", value);
|
||||||
|
// React to token change
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## 7. Common Patterns
|
||||||
|
|
||||||
|
### Execute Search on Token Change
|
||||||
|
```javascript
|
||||||
|
tokens.on("change:incident_id", function(model, value) {
|
||||||
|
if (value) {
|
||||||
|
var detailSearch = new SearchManager({
|
||||||
|
id: "incident_detail_" + Date.now(),
|
||||||
|
search: "index=socradar_incidents alarm_id=\"" + value + "\" | head 1",
|
||||||
|
autostart: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Conditional Search Execution
|
||||||
|
```javascript
|
||||||
|
$("#run_search_btn").click(function() {
|
||||||
|
var searchType = tokens.get("search_type");
|
||||||
|
|
||||||
|
var searchQuery;
|
||||||
|
switch(searchType) {
|
||||||
|
case "high_risk":
|
||||||
|
searchQuery = "index=socradar_incidents alarm_risk_level=HIGH";
|
||||||
|
break;
|
||||||
|
case "recent":
|
||||||
|
searchQuery = "index=socradar_incidents | head 100";
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
searchQuery = "index=socradar_incidents";
|
||||||
|
}
|
||||||
|
|
||||||
|
var search = new SearchManager({
|
||||||
|
id: "conditional_search_" + Date.now(),
|
||||||
|
search: searchQuery,
|
||||||
|
autostart: true
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## 8. Debugging Tips
|
||||||
|
|
||||||
|
### 1. Check if Components Exist
|
||||||
|
```javascript
|
||||||
|
var component = mvc.Components.get("my_component");
|
||||||
|
if (component) {
|
||||||
|
console.log("Component found:", component);
|
||||||
|
} else {
|
||||||
|
console.error("Component not found!");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. List All Components
|
||||||
|
```javascript
|
||||||
|
console.log("All components:", mvc.Components.toJSON());
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Debug Token Values
|
||||||
|
```javascript
|
||||||
|
console.log("All tokens:", tokens.toJSON());
|
||||||
|
console.log("Submitted tokens:", submittedTokens.toJSON());
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Monitor Search Progress
|
||||||
|
```javascript
|
||||||
|
mySearch.on('all', function(eventName) {
|
||||||
|
console.log("Search event:", eventName);
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example: Complete Status Update Implementation
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
require([
|
||||||
|
'underscore',
|
||||||
|
'jquery',
|
||||||
|
'splunkjs/mvc',
|
||||||
|
'splunkjs/mvc/searchmanager',
|
||||||
|
'splunkjs/mvc/simplexml/ready!'
|
||||||
|
], function(_, $, mvc, SearchManager) {
|
||||||
|
|
||||||
|
console.log("Dashboard JavaScript loaded");
|
||||||
|
|
||||||
|
// Get token models
|
||||||
|
var tokens = mvc.Components.get("default");
|
||||||
|
var submittedTokens = mvc.Components.get("submitted");
|
||||||
|
|
||||||
|
// Debug: List all components
|
||||||
|
console.log("Available components:", Object.keys(mvc.Components.toJSON()));
|
||||||
|
|
||||||
|
// Set up button click handler
|
||||||
|
$(document).on("click", "#update_status_btn", function(e) {
|
||||||
|
e.preventDefault();
|
||||||
|
|
||||||
|
// Get current token values
|
||||||
|
var incidentId = tokens.get("incident_id");
|
||||||
|
var newStatus = tokens.get("new_status");
|
||||||
|
|
||||||
|
console.log("Updating incident:", incidentId, "to status:", newStatus);
|
||||||
|
|
||||||
|
if (!incidentId || !newStatus) {
|
||||||
|
alert("Please select an incident and a new status");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Disable button during update
|
||||||
|
var $btn = $(this);
|
||||||
|
$btn.prop('disabled', true).text('Updating...');
|
||||||
|
|
||||||
|
// Create the update search
|
||||||
|
var updateSearch = new SearchManager({
|
||||||
|
id: "status_update_" + Date.now(),
|
||||||
|
search: '| sendalert update_socradar_incident_status param.incident_id="' + incidentId + '" param.new_status="' + newStatus + '"',
|
||||||
|
earliest_time: "-1m",
|
||||||
|
latest_time: "now",
|
||||||
|
cache: false,
|
||||||
|
autostart: true
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle completion
|
||||||
|
updateSearch.on('search:done', function() {
|
||||||
|
console.log("Status update successful");
|
||||||
|
|
||||||
|
// Re-enable button
|
||||||
|
$btn.prop('disabled', false).text('Update Status');
|
||||||
|
|
||||||
|
// Show success message
|
||||||
|
alert("Status updated successfully!");
|
||||||
|
|
||||||
|
// Refresh the incidents table
|
||||||
|
var tableSearch = mvc.Components.get("incident_details_table_search");
|
||||||
|
if (tableSearch) {
|
||||||
|
tableSearch.startSearch();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean up
|
||||||
|
updateSearch.finalize();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle errors
|
||||||
|
updateSearch.on('search:error search:fail', function(state, job) {
|
||||||
|
console.error("Status update failed:", state);
|
||||||
|
|
||||||
|
// Re-enable button
|
||||||
|
$btn.prop('disabled', false).text('Update Status');
|
||||||
|
|
||||||
|
// Show error message
|
||||||
|
alert("Failed to update status. Please check the logs.");
|
||||||
|
|
||||||
|
// Clean up
|
||||||
|
updateSearch.finalize();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Monitor token changes for debugging
|
||||||
|
tokens.on("change", function() {
|
||||||
|
console.log("Token changed:", tokens.toJSON());
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
This guide provides the foundation for implementing JavaScript in Splunk dashboards with proper MVC integration, token handling, and search execution.
|
||||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,323 @@
|
|||||||
|
{
|
||||||
|
"meta": {
|
||||||
|
"name": "TA-socradar-incidents",
|
||||||
|
"displayName": "SOCRadar Incidents",
|
||||||
|
"version": "1.9.18",
|
||||||
|
"restRoot": "TA_socradar_incidents",
|
||||||
|
"schemaVersion": "0.0.8",
|
||||||
|
"_uccVersion": "5.48.0"
|
||||||
|
},
|
||||||
|
"pages": {
|
||||||
|
"configuration": {
|
||||||
|
"title": "Configuration",
|
||||||
|
"description": "Set up your add-on",
|
||||||
|
"tabs": [
|
||||||
|
{
|
||||||
|
"name": "proxy",
|
||||||
|
"title": "Proxy",
|
||||||
|
"entity": [
|
||||||
|
{
|
||||||
|
"field": "proxy_enabled",
|
||||||
|
"label": "Enable",
|
||||||
|
"type": "checkbox"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "proxy_type",
|
||||||
|
"label": "Proxy Type",
|
||||||
|
"type": "singleSelect",
|
||||||
|
"options": {
|
||||||
|
"disableSearch": true,
|
||||||
|
"autoCompleteFields": [
|
||||||
|
{
|
||||||
|
"label": "http",
|
||||||
|
"value": "http"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "socks4",
|
||||||
|
"value": "socks4"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "socks5",
|
||||||
|
"value": "socks5"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"defaultValue": "http"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "proxy_url",
|
||||||
|
"label": "Host",
|
||||||
|
"type": "text",
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 0,
|
||||||
|
"maxLength": 4096,
|
||||||
|
"errorMsg": "Max host length is 4096"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "proxy_port",
|
||||||
|
"label": "Port",
|
||||||
|
"type": "text",
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "number",
|
||||||
|
"range": [
|
||||||
|
1,
|
||||||
|
65535
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "proxy_username",
|
||||||
|
"label": "Username",
|
||||||
|
"type": "text",
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 0,
|
||||||
|
"maxLength": 50,
|
||||||
|
"errorMsg": "Max length of username is 50"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "proxy_password",
|
||||||
|
"label": "Password",
|
||||||
|
"type": "text",
|
||||||
|
"encrypted": true,
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 0,
|
||||||
|
"maxLength": 8192,
|
||||||
|
"errorMsg": "Max length of password is 8192"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "proxy_rdns",
|
||||||
|
"label": "Remote DNS resolution",
|
||||||
|
"type": "checkbox"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"options": {
|
||||||
|
"saveValidator": "function(formData) { if(!formData.proxy_enabled || formData.proxy_enabled === '0') {return true; } if(!formData.proxy_url) { return 'Proxy Host can not be empty'; } if(!formData.proxy_port) { return 'Proxy Port can not be empty'; } return true; }"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "logging",
|
||||||
|
"title": "Logging",
|
||||||
|
"entity": [
|
||||||
|
{
|
||||||
|
"type": "singleSelect",
|
||||||
|
"label": "Log level",
|
||||||
|
"options": {
|
||||||
|
"disableSearch": true,
|
||||||
|
"autoCompleteFields": [
|
||||||
|
{
|
||||||
|
"value": "DEBUG",
|
||||||
|
"label": "DEBUG"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value": "INFO",
|
||||||
|
"label": "INFO"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value": "WARNING",
|
||||||
|
"label": "WARNING"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value": "ERROR",
|
||||||
|
"label": "ERROR"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value": "CRITICAL",
|
||||||
|
"label": "CRITICAL"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"defaultValue": "INFO",
|
||||||
|
"field": "loglevel",
|
||||||
|
"required": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "additional_parameters",
|
||||||
|
"title": "Add-on Settings",
|
||||||
|
"entity": [
|
||||||
|
{
|
||||||
|
"field": "how_many_days",
|
||||||
|
"label": "How Many Days",
|
||||||
|
"type": "text",
|
||||||
|
"help": "",
|
||||||
|
"required": false,
|
||||||
|
"defaultValue": "",
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 0,
|
||||||
|
"maxLength": 8192,
|
||||||
|
"errorMsg": "Max length of text input is 8192"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"inputs": {
|
||||||
|
"title": "Inputs",
|
||||||
|
"description": "Manage your data inputs",
|
||||||
|
"table": {
|
||||||
|
"header": [
|
||||||
|
{
|
||||||
|
"field": "name",
|
||||||
|
"label": "Name"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "interval",
|
||||||
|
"label": "Interval"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "index",
|
||||||
|
"label": "Index"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "disabled",
|
||||||
|
"label": "Status"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"moreInfo": [
|
||||||
|
{
|
||||||
|
"field": "name",
|
||||||
|
"label": "Name"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "interval",
|
||||||
|
"label": "Interval"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "index",
|
||||||
|
"label": "Index"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "disabled",
|
||||||
|
"label": "Status"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "socradar_api_key",
|
||||||
|
"label": "SOCRadar API Key"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "socradar_company_id",
|
||||||
|
"label": "SOCRadar Company ID"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"actions": [
|
||||||
|
"edit",
|
||||||
|
"enable",
|
||||||
|
"delete",
|
||||||
|
"clone"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"services": [
|
||||||
|
{
|
||||||
|
"name": "socradar_incidents_collector",
|
||||||
|
"title": "SOCRadar Incidents API Collector",
|
||||||
|
"entity": [
|
||||||
|
{
|
||||||
|
"field": "name",
|
||||||
|
"label": "Name",
|
||||||
|
"type": "text",
|
||||||
|
"help": "Enter a unique name for the data input",
|
||||||
|
"required": true,
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "regex",
|
||||||
|
"pattern": "^[a-zA-Z]\\w*$",
|
||||||
|
"errorMsg": "Input Name must start with a letter and followed by alphabetic letters, digits or underscores."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 1,
|
||||||
|
"maxLength": 100,
|
||||||
|
"errorMsg": "Length of input name should be between 1 and 100"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "text",
|
||||||
|
"field": "interval",
|
||||||
|
"label": "Interval",
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "regex",
|
||||||
|
"errorMsg": "Interval must be either a non-negative number or -1.",
|
||||||
|
"pattern": "^(?:-1|\\d+(?:\\.\\d+)?)$"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"help": "Time interval of input in seconds.",
|
||||||
|
"required": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "index",
|
||||||
|
"label": "Index",
|
||||||
|
"type": "singleSelect",
|
||||||
|
"defaultValue": "default",
|
||||||
|
"options": {
|
||||||
|
"endpointUrl": "data/indexes",
|
||||||
|
"denyList": "^_.*$",
|
||||||
|
"createSearchChoice": true
|
||||||
|
},
|
||||||
|
"required": true,
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 1,
|
||||||
|
"maxLength": 80,
|
||||||
|
"errorMsg": "Length of index name should be between 1 and 80."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "socradar_api_key",
|
||||||
|
"label": "SOCRadar API Key",
|
||||||
|
"help": "",
|
||||||
|
"required": false,
|
||||||
|
"type": "text",
|
||||||
|
"defaultValue": "SOCRadar",
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 0,
|
||||||
|
"maxLength": 8192,
|
||||||
|
"errorMsg": "Max length of text input is 8192"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "socradar_company_id",
|
||||||
|
"label": "SOCRadar Company ID",
|
||||||
|
"help": "",
|
||||||
|
"required": false,
|
||||||
|
"type": "text",
|
||||||
|
"validators": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 0,
|
||||||
|
"maxLength": 8192,
|
||||||
|
"errorMsg": "Max length of text input is 8192"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,719 @@
|
|||||||
|
{
|
||||||
|
"openapi": "3.0.0",
|
||||||
|
"info": {
|
||||||
|
"title": "TA-socradar-incidents",
|
||||||
|
"version": "1.9.18",
|
||||||
|
"description": "SOCRadar Incidents",
|
||||||
|
"contact": {
|
||||||
|
"name": "Burak G\u00f6ger"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"servers": [
|
||||||
|
{
|
||||||
|
"url": "https://{domain}:{port}/servicesNS/-/TA-socradar-incidents",
|
||||||
|
"variables": {
|
||||||
|
"domain": {
|
||||||
|
"default": "localhost"
|
||||||
|
},
|
||||||
|
"port": {
|
||||||
|
"default": "8089"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Access via management interface"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"components": {
|
||||||
|
"schemas": {
|
||||||
|
"proxy": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"proxy_enabled": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"proxy_type": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"http",
|
||||||
|
"socks4",
|
||||||
|
"socks5"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"proxy_url": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"proxy_port": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"proxy_username": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"proxy_password": {
|
||||||
|
"type": "string",
|
||||||
|
"format": "password"
|
||||||
|
},
|
||||||
|
"proxy_rdns": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"proxy_without_name": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"proxy_enabled": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"proxy_type": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"http",
|
||||||
|
"socks4",
|
||||||
|
"socks5"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"proxy_url": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"proxy_port": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"proxy_username": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"proxy_password": {
|
||||||
|
"type": "string",
|
||||||
|
"format": "password"
|
||||||
|
},
|
||||||
|
"proxy_rdns": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"logging": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"loglevel": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"DEBUG",
|
||||||
|
"INFO",
|
||||||
|
"WARNING",
|
||||||
|
"ERROR",
|
||||||
|
"CRITICAL"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"logging_without_name": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"loglevel": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"DEBUG",
|
||||||
|
"INFO",
|
||||||
|
"WARNING",
|
||||||
|
"ERROR",
|
||||||
|
"CRITICAL"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"additional_parameters": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"how_many_days": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"additional_parameters_without_name": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"how_many_days": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"socradar_incidents_collector": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"interval": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"index": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"socradar_api_key": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"socradar_company_id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"disabled": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"False",
|
||||||
|
"True"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"socradar_incidents_collector_without_name": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"interval": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"index": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"socradar_api_key": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"socradar_company_id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"disabled": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"False",
|
||||||
|
"True"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"socradar_incidents_collector_without_disabled": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"interval": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"index": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"socradar_api_key": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"socradar_company_id": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"securitySchemes": {
|
||||||
|
"BasicAuth": {
|
||||||
|
"type": "http",
|
||||||
|
"scheme": "basic"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"paths": {
|
||||||
|
"/TA_socradar_incidents_settings/proxy": {
|
||||||
|
"get": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Get list of items for proxy",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/proxy_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Get list of items for proxy",
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"post": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Create item in proxy",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/proxy_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Create item in proxy",
|
||||||
|
"requestBody": {
|
||||||
|
"content": {
|
||||||
|
"application/x-www-form-urlencoded": {
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/proxy"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": false
|
||||||
|
},
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"name": "output_mode",
|
||||||
|
"in": "query",
|
||||||
|
"required": true,
|
||||||
|
"description": "Output mode",
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"json"
|
||||||
|
],
|
||||||
|
"default": "json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"/TA_socradar_incidents_settings/logging": {
|
||||||
|
"get": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Get list of items for logging",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/logging_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Get list of items for logging",
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"post": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Create item in logging",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/logging_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Create item in logging",
|
||||||
|
"requestBody": {
|
||||||
|
"content": {
|
||||||
|
"application/x-www-form-urlencoded": {
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/logging"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": false
|
||||||
|
},
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"name": "output_mode",
|
||||||
|
"in": "query",
|
||||||
|
"required": true,
|
||||||
|
"description": "Output mode",
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"json"
|
||||||
|
],
|
||||||
|
"default": "json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"/TA_socradar_incidents_settings/additional_parameters": {
|
||||||
|
"get": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Get list of items for additional_parameters",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/additional_parameters_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Get list of items for additional_parameters",
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"post": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Create item in additional_parameters",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/additional_parameters_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Create item in additional_parameters",
|
||||||
|
"requestBody": {
|
||||||
|
"content": {
|
||||||
|
"application/x-www-form-urlencoded": {
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/additional_parameters"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": false
|
||||||
|
},
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"name": "output_mode",
|
||||||
|
"in": "query",
|
||||||
|
"required": true,
|
||||||
|
"description": "Output mode",
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"json"
|
||||||
|
],
|
||||||
|
"default": "json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"/TA_socradar_incidents_socradar_incidents_collector": {
|
||||||
|
"get": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Get list of items for socradar_incidents_collector",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/socradar_incidents_collector_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Get list of items for socradar_incidents_collector",
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"post": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Create item in socradar_incidents_collector",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/socradar_incidents_collector_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Create item in socradar_incidents_collector",
|
||||||
|
"requestBody": {
|
||||||
|
"content": {
|
||||||
|
"application/x-www-form-urlencoded": {
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/socradar_incidents_collector_without_disabled"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": false
|
||||||
|
},
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"name": "output_mode",
|
||||||
|
"in": "query",
|
||||||
|
"required": true,
|
||||||
|
"description": "Output mode",
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"json"
|
||||||
|
],
|
||||||
|
"default": "json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"/TA_socradar_incidents_socradar_incidents_collector/{name}": {
|
||||||
|
"get": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Get socradar_incidents_collector item details",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/socradar_incidents_collector_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Get socradar_incidents_collector item details",
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"post": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Update socradar_incidents_collector item",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/socradar_incidents_collector_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Update socradar_incidents_collector item",
|
||||||
|
"requestBody": {
|
||||||
|
"content": {
|
||||||
|
"application/x-www-form-urlencoded": {
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/socradar_incidents_collector_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": false
|
||||||
|
},
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"delete": {
|
||||||
|
"responses": {
|
||||||
|
"200": {
|
||||||
|
"description": "Delete socradar_incidents_collector item",
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"entry": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"content": {
|
||||||
|
"$ref": "#/components/schemas/socradar_incidents_collector_without_name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Delete socradar_incidents_collector item",
|
||||||
|
"deprecated": false
|
||||||
|
},
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"in": "path",
|
||||||
|
"required": true,
|
||||||
|
"description": "The name of the item to operate on",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "output_mode",
|
||||||
|
"in": "query",
|
||||||
|
"required": true,
|
||||||
|
"description": "Output mode",
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"json"
|
||||||
|
],
|
||||||
|
"default": "json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"security": [
|
||||||
|
{
|
||||||
|
"BasicAuth": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@ -0,0 +1,40 @@
|
|||||||
|
<!--
|
||||||
|
~ Copyright 2024 Splunk Inc.
|
||||||
|
~
|
||||||
|
~ Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
~ you may not use this file except in compliance with the License.
|
||||||
|
~ You may obtain a copy of the License at
|
||||||
|
~
|
||||||
|
~ http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
~
|
||||||
|
~ Unless required by applicable law or agreed to in writing, software
|
||||||
|
~ distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
~ See the License for the specific language governing permissions and
|
||||||
|
~ limitations under the License.
|
||||||
|
~
|
||||||
|
-->
|
||||||
|
<%! app_name = cherrypy.request.path_info.split('/')[3] %>\
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html class="no-js" lang="">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
||||||
|
<title>${_('Loading')}</title>
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
<link rel="apple-touch-icon" href="apple-touch-icon.png" />
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<script src="${make_url('/config?autoload=1')}" crossorigin="use-credentials"></script>
|
||||||
|
<script src="${make_url('/static/js/i18n.js')}"></script>
|
||||||
|
<script src="${make_url('/i18ncatalog?autoload=1')}"></script>
|
||||||
|
<script>
|
||||||
|
__splunkd_partials__ = ${json_decode(splunkd)};
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<% page_path = "/static/app/" + app_name + "/js/build/entry_page.js" %>
|
||||||
|
|
||||||
|
<script src="${make_url(page_path)}"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
@ -0,0 +1,123 @@
|
|||||||
|
|
||||||
|
import ta_socradar_incidents_declare
|
||||||
|
|
||||||
|
from splunktaucclib.rest_handler.endpoint import (
|
||||||
|
field,
|
||||||
|
validator,
|
||||||
|
RestModel,
|
||||||
|
MultipleModel,
|
||||||
|
)
|
||||||
|
from splunktaucclib.rest_handler import admin_external, util
|
||||||
|
from splunk_aoblib.rest_migration import ConfigMigrationHandler
|
||||||
|
|
||||||
|
util.remove_http_proxy_env_vars()
|
||||||
|
|
||||||
|
|
||||||
|
# REST model for the [proxy] stanza of ta_socradar_incidents_settings.conf.
# Field order and constraints mirror the .conf.spec shipped with the add-on.
fields_proxy = [
    # Flag indicating whether outbound requests should go through the proxy;
    # splunktaucclib stores boolean-ish settings as strings.
    field.RestField(
        'proxy_enabled',
        required=False,
        encrypted=False,
        default=None,
        validator=None
    ),
    # Proxy scheme: http, socks4 or socks5 (enforced by the UI enum, not here).
    field.RestField(
        'proxy_type',
        required=False,
        encrypted=False,
        default='http',
        validator=None
    ),
    # Proxy host name or IP address.
    field.RestField(
        'proxy_url',
        required=False,
        encrypted=False,
        default=None,
        validator=validator.String(
            min_len=0,
            max_len=4096,
        )
    ),
    # Proxy TCP port; constrained to the valid port range.
    field.RestField(
        'proxy_port',
        required=False,
        encrypted=False,
        default=None,
        validator=validator.Number(
            min_val=1,
            max_val=65535,
        )
    ),
    # Optional proxy auth user name.
    field.RestField(
        'proxy_username',
        required=False,
        encrypted=False,
        default=None,
        validator=validator.String(
            min_len=0,
            max_len=50,
        )
    ),
    # Optional proxy auth password; encrypted=True keeps it out of clear-text
    # .conf files (stored via Splunk's credential store).
    field.RestField(
        'proxy_password',
        required=False,
        encrypted=True,
        default=None,
        validator=validator.String(
            min_len=0,
            max_len=8192,
        )
    ),
    # Whether DNS resolution should happen on the proxy side (remote DNS).
    field.RestField(
        'proxy_rdns',
        required=False,
        encrypted=False,
        default=None,
        validator=None
    )
]
# Bind the field list to the 'proxy' stanza name.
model_proxy = RestModel(fields_proxy, name='proxy')
|
||||||
|
|
||||||
|
|
||||||
|
# REST model for the [logging] stanza: a single log-level selector.
fields_logging = [
    # Add-on log verbosity; the UI restricts values to
    # DEBUG/INFO/WARNING/ERROR/CRITICAL, defaulting to INFO.
    field.RestField(
        'loglevel',
        required=False,
        encrypted=False,
        default='INFO',
        validator=None
    )
]
# Bind the field list to the 'logging' stanza name.
model_logging = RestModel(fields_logging, name='logging')
|
||||||
|
|
||||||
|
|
||||||
|
# REST model for the [additional_parameters] stanza.
fields_additional_parameters = [
    # Look-back window for incident collection, in days.
    # NOTE(review): stored and validated as a free-form string here;
    # presumably parsed as an integer by the collector — confirm there.
    field.RestField(
        'how_many_days',
        required=False,
        encrypted=False,
        default='',
        validator=validator.String(
            min_len=0,
            max_len=8192,
        )
    )
]
# Bind the field list to the 'additional_parameters' stanza name.
model_additional_parameters = RestModel(fields_additional_parameters, name='additional_parameters')
|
||||||
|
|
||||||
|
|
||||||
|
# Aggregate the three stanza models into one endpoint persisted in
# ta_socradar_incidents_settings.conf (one RestModel per stanza).
endpoint = MultipleModel(
    'ta_socradar_incidents_settings',
    models=[
        model_proxy,
        model_logging,
        model_additional_parameters
    ],
)


if __name__ == '__main__':
    # Hand request processing to UCC's dispatcher; ConfigMigrationHandler
    # migrates legacy add-on configuration when the endpoint is accessed.
    admin_external.handle(
        endpoint,
        handler=ConfigMigrationHandler,
    )
|
||||||
@ -0,0 +1,78 @@
|
|||||||
|
|
||||||
|
import ta_socradar_incidents_declare
|
||||||
|
|
||||||
|
from splunktaucclib.rest_handler.endpoint import (
|
||||||
|
field,
|
||||||
|
validator,
|
||||||
|
RestModel,
|
||||||
|
DataInputModel,
|
||||||
|
)
|
||||||
|
from splunktaucclib.rest_handler import admin_external, util
|
||||||
|
from splunk_aoblib.rest_migration import ConfigMigrationHandler
|
||||||
|
|
||||||
|
util.remove_http_proxy_env_vars()
|
||||||
|
|
||||||
|
|
||||||
|
# Field definitions for the socradar_incidents_collector modular input.
# Constraints mirror the UI-side validators declared in globalConfig.
fields = [
    # Collection interval in seconds.
    field.RestField(
        'interval',
        required=True,
        encrypted=False,
        default=None,
        # Contract (shared with the UI validator): the interval must be -1
        # or a non-negative number.  The previous pattern
        # (^\-[1-9]\d*$|^\d*$) also accepted the empty string and arbitrary
        # negative integers, contradicting that contract; this pattern is
        # now identical to the UI-side regex.
        validator=validator.Pattern(
            regex=r"""^(?:-1|\d+(?:\.\d+)?)$""",
        )
    ),
    # Destination Splunk index for collected incidents.
    field.RestField(
        'index',
        required=True,
        encrypted=False,
        default='default',
        validator=validator.String(
            min_len=1,
            max_len=80,
        )
    ),
    # SOCRadar API key used to authenticate collection requests.
    # NOTE(review): stored with encrypted=False, so the key lives in
    # clear text in inputs.conf — consider encrypted=True.
    field.RestField(
        'socradar_api_key',
        required=False,
        encrypted=False,
        default='SOCRadar',
        validator=validator.String(
            min_len=0,
            max_len=8192,
        )
    ),
    # SOCRadar company identifier the incidents are fetched for.
    field.RestField(
        'socradar_company_id',
        required=False,
        encrypted=False,
        default=None,
        validator=validator.String(
            min_len=0,
            max_len=8192,
        )
    ),

    # Standard modular-input enable/disable flag managed by Splunk.
    field.RestField(
        'disabled',
        required=False,
        validator=None
    )

]
# name=None: the entity name comes from each data-input stanza itself.
model = RestModel(fields, name=None)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Modular-input endpoint: persists socradar_incidents_collector stanzas
# in inputs.conf via the UCC DataInputModel wrapper.
endpoint = DataInputModel(
    'socradar_incidents_collector',
    model,
)


if __name__ == '__main__':
    # Hand request processing to UCC's dispatcher; ConfigMigrationHandler
    # migrates legacy add-on configuration when the endpoint is accessed.
    admin_external.handle(
        endpoint,
        handler=ConfigMigrationHandler,
    )
|
||||||
Binary file not shown.
Binary file not shown.
@ -0,0 +1,392 @@
|
|||||||
|
"""
|
||||||
|
Splunk Add-on - SOCRadar Incidents v4 collector
|
||||||
|
Production-ready script for Splunk Modular Input with API pagination,
|
||||||
|
rate limit handling, and a per-run total new incident cap.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
import requests
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
# Splunk Add-on SDK objects (helper, ew) are injected by Splunk when the script runs.
|
||||||
|
|
||||||
|
# Base URL for all SOCRadar v4 API requests.
SOCRADAR_API_BASE_URL = "https://platform.socradar.com/api"
# Per-request HTTP timeout, in seconds.
API_TIMEOUT_SECONDS = 30
# Safety cap on how many new incidents a single collection run may emit.
DEFAULT_MAX_NEW_INCIDENTS_PER_RUN = 500
|
||||||
|
|
||||||
|
def validate_input(helper, definition):
    """Hook invoked by Splunk to validate a modular-input configuration.

    All field-level constraints are already enforced by the REST handler,
    so no additional validation is performed here; the hook is a no-op.
    """
    return None
|
||||||
|
|
||||||
|
def collect_events(helper, ew):
|
||||||
|
helper.log_info("SCRIPT_START: SOCRadar Incidents v4 collection run.")
|
||||||
|
|
||||||
|
# --- Stage 1: Get Configuration ---
|
||||||
|
# Get proxy settings
|
||||||
|
proxy_settings = helper.get_proxy()
|
||||||
|
proxies = None
|
||||||
|
|
||||||
|
# Build proxy configuration if enabled
|
||||||
|
if proxy_settings:
|
||||||
|
proxy_url = proxy_settings.get('proxy_url')
|
||||||
|
proxy_port = proxy_settings.get('proxy_port')
|
||||||
|
proxy_username = proxy_settings.get('proxy_username')
|
||||||
|
proxy_password = proxy_settings.get('proxy_password')
|
||||||
|
|
||||||
|
if proxy_url and proxy_port:
|
||||||
|
# Build proxy URL with authentication if provided
|
||||||
|
if proxy_username and proxy_password:
|
||||||
|
proxy = f"http://{proxy_username}:{proxy_password}@{proxy_url}:{proxy_port}"
|
||||||
|
else:
|
||||||
|
proxy = f"http://{proxy_url}:{proxy_port}"
|
||||||
|
|
||||||
|
proxies = {
|
||||||
|
'http': proxy,
|
||||||
|
'https': proxy
|
||||||
|
}
|
||||||
|
helper.log_info(f"Proxy configured: {proxy_url}:{proxy_port}")
|
||||||
|
|
||||||
|
# Get credentials from INPUT parameters (not global settings)
|
||||||
|
company_id = helper.get_arg("socradar_company_id")
|
||||||
|
api_key = helper.get_arg("socradar_api_key")
|
||||||
|
|
||||||
|
# Log what we retrieved (be careful with full API key)
|
||||||
|
helper.log_info(f"Retrieved company_id: {company_id}")
|
||||||
|
|
||||||
|
# Check if they might be swapped (company ID is usually shorter)
|
||||||
|
if company_id and api_key:
|
||||||
|
if len(company_id) > 20 and len(api_key) < 20:
|
||||||
|
helper.log_warning("WARNING: API key and company ID might be swapped! Company ID is usually shorter.")
|
||||||
|
# Swap them
|
||||||
|
helper.log_info("Swapping credentials...")
|
||||||
|
company_id, api_key = api_key, company_id
|
||||||
|
helper.log_info(f"After swap - company_id: {company_id}, api_key: {api_key[:10]}...")
|
||||||
|
|
||||||
|
if not company_id or not api_key:
|
||||||
|
helper.log_error("Missing credentials. Please configure socradar_company_id and socradar_api_key in the input configuration.")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Get how many days from GLOBAL settings (add-on level)
|
||||||
|
how_many_days_str = helper.get_global_setting("how_many_days")
|
||||||
|
if how_many_days_str:
|
||||||
|
try:
|
||||||
|
how_many_days = max(1, int(how_many_days_str))
|
||||||
|
except:
|
||||||
|
how_many_days = 1
|
||||||
|
else:
|
||||||
|
how_many_days = 1
|
||||||
|
|
||||||
|
helper.log_info(f"Configuration: Looking back {how_many_days} days")
|
||||||
|
|
||||||
|
# Get max incidents per run from global settings
|
||||||
|
try:
|
||||||
|
max_new_incidents_per_run = int(helper.get_global_setting("total_limit") or DEFAULT_MAX_NEW_INCIDENTS_PER_RUN)
|
||||||
|
except:
|
||||||
|
max_new_incidents_per_run = DEFAULT_MAX_NEW_INCIDENTS_PER_RUN
|
||||||
|
|
||||||
|
# Get Splunk settings
|
||||||
|
input_stanza_name = helper.get_input_stanza_names()
|
||||||
|
output_index = helper.get_output_index()
|
||||||
|
sourcetype = helper.get_sourcetype()
|
||||||
|
input_type = helper.get_input_type()
|
||||||
|
|
||||||
|
# Log the actual input stanza name for debugging
|
||||||
|
helper.log_info(f"Input stanza name from helper: {input_stanza_name}")
|
||||||
|
|
||||||
|
# --- Stage 2: Load Checkpoint ---
|
||||||
|
checkpoint_key = f"{input_stanza_name}_socradar_v4_processed_alarms"
|
||||||
|
processed_alarms = {} # Changed to dict to store alarm_id -> status
|
||||||
|
indexed_version = "1.9.22" # Default to current version for new checkpoints
|
||||||
|
|
||||||
|
try:
|
||||||
|
checkpoint_data_raw = helper.get_check_point(checkpoint_key)
|
||||||
|
if checkpoint_data_raw:
|
||||||
|
checkpoint_data = json.loads(checkpoint_data_raw)
|
||||||
|
# Support both old format (list) and new format (dict)
|
||||||
|
if isinstance(checkpoint_data.get("alarm_ids"), list):
|
||||||
|
# Old format - convert to dict with string keys
|
||||||
|
for alarm_id in checkpoint_data.get("alarm_ids", []):
|
||||||
|
processed_alarms[str(alarm_id)] = None
|
||||||
|
indexed_version = "1.0.0" # Old format = pre-v1.9.20
|
||||||
|
else:
|
||||||
|
# New format - dict with status, ensure all keys are strings
|
||||||
|
temp_alarms = checkpoint_data.get("alarm_status", {})
|
||||||
|
processed_alarms = {str(k): v for k, v in temp_alarms.items()}
|
||||||
|
# Get indexed version from checkpoint (default to 1.0.0 if not present)
|
||||||
|
indexed_version = checkpoint_data.get("indexed_version", "1.0.0")
|
||||||
|
helper.log_info(f"Loaded checkpoint with {len(processed_alarms)} already processed alarm IDs (indexed_version: {indexed_version})")
|
||||||
|
except:
|
||||||
|
helper.log_info("No checkpoint found, starting fresh")
|
||||||
|
|
||||||
|
# --- Stage 3: Calculate Time Window ---
|
||||||
|
current_time = datetime.now(timezone.utc)
|
||||||
|
start_time = current_time - timedelta(days=how_many_days)
|
||||||
|
|
||||||
|
# Convert to Unix timestamps (SOCRadar format)
|
||||||
|
end_timestamp = int(current_time.timestamp())
|
||||||
|
start_timestamp = int(start_time.timestamp())
|
||||||
|
|
||||||
|
helper.log_info(f"Time window: {start_time.strftime('%Y-%m-%d %H:%M')} to {current_time.strftime('%Y-%m-%d %H:%M')}")
|
||||||
|
|
||||||
|
# --- Stage 4: Build API URL ---
|
||||||
|
api_url = f"{SOCRADAR_API_BASE_URL}/company/{company_id}/incidents/v4"
|
||||||
|
helper.log_info(f"API URL: {api_url}")
|
||||||
|
|
||||||
|
# --- Stage 5: Fetch Incidents with Pagination ---
|
||||||
|
all_incidents = []
|
||||||
|
page = 1
|
||||||
|
consecutive_rate_limits = 0
|
||||||
|
new_incidents_count = 0
|
||||||
|
duplicate_count = 0
|
||||||
|
|
||||||
|
# Fetch all pages until API returns no more data (no page limit)
|
||||||
|
while True:
|
||||||
|
# API parameters - SOCRadar expects key as URL parameter for incidents endpoint
|
||||||
|
params = {
|
||||||
|
"key": api_key,
|
||||||
|
"limit": 100, # Get 100 per page from SOCRadar
|
||||||
|
"page": page,
|
||||||
|
"start_date": start_timestamp,
|
||||||
|
"end_date": end_timestamp
|
||||||
|
}
|
||||||
|
|
||||||
|
# Log the request details
|
||||||
|
helper.log_info(f"Making request to page {page}")
|
||||||
|
helper.log_debug(f"Request params: {params}")
|
||||||
|
|
||||||
|
# Make request
|
||||||
|
try:
|
||||||
|
response = requests.get(api_url, params=params, timeout=API_TIMEOUT_SECONDS, proxies=proxies)
|
||||||
|
|
||||||
|
# Log response status
|
||||||
|
helper.log_info(f"Response status code: {response.status_code}")
|
||||||
|
|
||||||
|
# Check for rate limit
|
||||||
|
if response.status_code == 429 or "rate limit exceeded" in response.text.lower():
|
||||||
|
consecutive_rate_limits += 1
|
||||||
|
|
||||||
|
if consecutive_rate_limits == 1:
|
||||||
|
wait_time = 30
|
||||||
|
else:
|
||||||
|
wait_time = 60
|
||||||
|
|
||||||
|
helper.log_warning(f"Rate limit hit! Waiting {wait_time} seconds... (attempt {consecutive_rate_limits})")
|
||||||
|
time.sleep(wait_time)
|
||||||
|
continue # Retry same page
|
||||||
|
|
||||||
|
# Reset rate limit counter on success
|
||||||
|
consecutive_rate_limits = 0
|
||||||
|
|
||||||
|
# Check for authentication error
|
||||||
|
if response.status_code == 401:
|
||||||
|
helper.log_error(f"API error: HTTP 401 Unauthorized. Please check your API credentials.")
|
||||||
|
helper.log_error(f"Response: {response.text}")
|
||||||
|
break
|
||||||
|
|
||||||
|
# Check for other errors
|
||||||
|
if response.status_code != 200:
|
||||||
|
helper.log_error(f"API error: HTTP {response.status_code}")
|
||||||
|
helper.log_error(f"Response: {response.text}")
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse response
|
||||||
|
data = response.json()
|
||||||
|
incidents = data.get("data", [])
|
||||||
|
|
||||||
|
if not incidents:
|
||||||
|
helper.log_info(f"No more incidents at page {page}")
|
||||||
|
break
|
||||||
|
|
||||||
|
# Process incidents on this page
|
||||||
|
for incident in incidents:
|
||||||
|
# Truncate long text fields to 5000 characters
|
||||||
|
if 'alarm_text' in incident and incident['alarm_text'] and len(str(incident['alarm_text'])) > 5000:
|
||||||
|
incident['alarm_text'] = str(incident['alarm_text'])[:5000] + '...'
|
||||||
|
if 'alarm_response' in incident and incident['alarm_response'] and len(str(incident['alarm_response'])) > 5000:
|
||||||
|
incident['alarm_response'] = str(incident['alarm_response'])[:5000] + '...'
|
||||||
|
if 'alarm_type_details' in incident and isinstance(incident['alarm_type_details'], dict):
|
||||||
|
if 'alarm_default_mitigation_plan' in incident['alarm_type_details'] and incident['alarm_type_details']['alarm_default_mitigation_plan']:
|
||||||
|
if len(str(incident['alarm_type_details']['alarm_default_mitigation_plan'])) > 5000:
|
||||||
|
incident['alarm_type_details']['alarm_default_mitigation_plan'] = str(incident['alarm_type_details']['alarm_default_mitigation_plan'])[:5000] + '...'
|
||||||
|
# Extract alarm main type and sub type
|
||||||
|
incident['alarm_main_type'] = incident['alarm_type_details'].get('alarm_main_type', 'N/A')
|
||||||
|
incident['alarm_sub_type'] = incident['alarm_type_details'].get('alarm_sub_type', 'N/A')
|
||||||
|
else:
|
||||||
|
# Set defaults if alarm_type_details is not present
|
||||||
|
incident['alarm_main_type'] = 'N/A'
|
||||||
|
incident['alarm_sub_type'] = 'N/A'
|
||||||
|
|
||||||
|
alarm_id = incident.get("alarm_id")
|
||||||
|
|
||||||
|
# Generate alarm link
|
||||||
|
if alarm_id and company_id:
|
||||||
|
incident['alarm_link'] = f"https://platform.socradar.com/app/company/{company_id}/alarm-management?tab=approved&alarmId={alarm_id}"
|
||||||
|
else:
|
||||||
|
incident['alarm_link'] = 'N/A'
|
||||||
|
current_status = incident.get("status", "N/A")
|
||||||
|
helper.log_info(f"Processing Alarm ID: {alarm_id} - Status: {current_status}")
|
||||||
|
|
||||||
|
# Check if already processed and if status changed
|
||||||
|
if alarm_id:
|
||||||
|
# Convert alarm_id to string for consistent comparison
|
||||||
|
alarm_id_str = str(alarm_id)
|
||||||
|
if alarm_id_str in processed_alarms:
|
||||||
|
old_status = processed_alarms.get(alarm_id_str)
|
||||||
|
helper.log_debug(f"Checkpoint check - alarm: {alarm_id_str}, old_status: {old_status}, current_status: {current_status}")
|
||||||
|
|
||||||
|
# Force re-index if indexed with version < 1.9.20 (TRUNCATE fix)
|
||||||
|
# This ensures events that were previously truncated get re-indexed with complete data
|
||||||
|
if indexed_version < "1.9.20":
|
||||||
|
helper.log_info(f"Force re-indexing alarm {alarm_id} (indexed with v{indexed_version}, needs complete data)")
|
||||||
|
incident['reindexed'] = True
|
||||||
|
incident['reindex_reason'] = f"Upgraded from v{indexed_version} (TRUNCATE fix)"
|
||||||
|
incident['previous_status'] = old_status
|
||||||
|
# Continue to index this event
|
||||||
|
elif old_status == current_status:
|
||||||
|
# Status unchanged and already indexed with good version - skip
|
||||||
|
duplicate_count += 1
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
# Status changed - index the update
|
||||||
|
helper.log_info(f"Status changed for {alarm_id}: {old_status} -> {current_status}")
|
||||||
|
incident['status_changed'] = True
|
||||||
|
incident['previous_status'] = old_status
|
||||||
|
|
||||||
|
# New incident or status update
|
||||||
|
all_incidents.append(incident)
|
||||||
|
new_incidents_count += 1
|
||||||
|
|
||||||
|
# Stop if we hit the per-run limit
|
||||||
|
if new_incidents_count >= max_new_incidents_per_run:
|
||||||
|
helper.log_info(f"Reached max incidents per run ({max_new_incidents_per_run})")
|
||||||
|
break
|
||||||
|
|
||||||
|
helper.log_info(f"Page {page}: Got {len(incidents)} incidents ({new_incidents_count} new/updated so far, {duplicate_count} unchanged)")
|
||||||
|
|
||||||
|
# Stop if we hit the limit
|
||||||
|
if new_incidents_count >= max_new_incidents_per_run:
|
||||||
|
break
|
||||||
|
|
||||||
|
# Check if this was the last page
|
||||||
|
if len(incidents) < 100:
|
||||||
|
helper.log_info("Reached last page")
|
||||||
|
break
|
||||||
|
|
||||||
|
page += 1
|
||||||
|
time.sleep(3) # Wait 3 seconds between requests for better API stability
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
helper.log_error(f"Error on page {page}: {str(e)}")
|
||||||
|
break
|
||||||
|
|
||||||
|
helper.log_info(f"Fetching complete. Found {new_incidents_count} new/updated incidents, skipped {duplicate_count} unchanged")
|
||||||
|
|
||||||
|
# --- Stage 6: Index New Incidents to Splunk in Batches of 15 ---
|
||||||
|
indexed_count = 0
|
||||||
|
indexed_alarms = {} # Changed to dict to store alarm_id -> status
|
||||||
|
batch_size = 15 # Reduced for better Splunk indexing performance
|
||||||
|
|
||||||
|
# Process in batches of 15
|
||||||
|
for batch_start in range(0, len(all_incidents), batch_size):
|
||||||
|
batch_end = min(batch_start + batch_size, len(all_incidents))
|
||||||
|
batch = all_incidents[batch_start:batch_end]
|
||||||
|
|
||||||
|
helper.log_info(f"Indexing batch {batch_start//batch_size + 1}: incidents {batch_start+1} to {batch_end}")
|
||||||
|
|
||||||
|
for incident in batch:
|
||||||
|
alarm_id = incident.get("alarm_id")
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get event time from incident
|
||||||
|
event_time = None
|
||||||
|
date_str = incident.get("date") # Using 'date' field based on your sample
|
||||||
|
if date_str:
|
||||||
|
try:
|
||||||
|
# Parse date like "2025-03-18 10:40:10"
|
||||||
|
event_datetime = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S")
|
||||||
|
event_time = event_datetime.timestamp()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Log what we're indexing for debugging
|
||||||
|
if incident.get('status_changed'):
|
||||||
|
helper.log_info(f"Indexing status change for alarm {alarm_id}: {incident.get('previous_status')} -> {incident.get('status')}")
|
||||||
|
helper.log_debug(f"Full incident data: {json.dumps(incident)[:500]}...")
|
||||||
|
|
||||||
|
# Create optimized event with critical fields first
|
||||||
|
# This ensures critical fields survive if event gets truncated
|
||||||
|
optimized_incident = OrderedDict()
|
||||||
|
|
||||||
|
# Critical fields FIRST (always visible even if truncated)
|
||||||
|
optimized_incident['alarm_id'] = incident.get('alarm_id')
|
||||||
|
optimized_incident['status'] = incident.get('status')
|
||||||
|
optimized_incident['date'] = incident.get('date')
|
||||||
|
optimized_incident['alarm_risk_level'] = incident.get('alarm_risk_level')
|
||||||
|
optimized_incident['alarm_asset'] = incident.get('alarm_asset')
|
||||||
|
optimized_incident['alarm_main_type'] = incident.get('alarm_main_type')
|
||||||
|
optimized_incident['alarm_sub_type'] = incident.get('alarm_sub_type')
|
||||||
|
optimized_incident['alarm_link'] = incident.get('alarm_link')
|
||||||
|
|
||||||
|
# Add all remaining fields
|
||||||
|
for key, value in incident.items():
|
||||||
|
if key not in optimized_incident:
|
||||||
|
optimized_incident[key] = value
|
||||||
|
|
||||||
|
# Create Splunk event
|
||||||
|
event = helper.new_event(
|
||||||
|
data=json.dumps(optimized_incident),
|
||||||
|
index=output_index,
|
||||||
|
source=input_type,
|
||||||
|
sourcetype=sourcetype,
|
||||||
|
time=event_time
|
||||||
|
)
|
||||||
|
ew.write_event(event)
|
||||||
|
|
||||||
|
indexed_count += 1
|
||||||
|
if alarm_id:
|
||||||
|
# Store alarm_id as string for consistent checkpoint handling
|
||||||
|
indexed_alarms[str(alarm_id)] = incident.get('status')
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
helper.log_error(f"Failed to index incident {alarm_id}: {str(e)}")
|
||||||
|
|
||||||
|
# Log progress after each batch
|
||||||
|
helper.log_info(f"Batch complete. Total indexed: {indexed_count}")
|
||||||
|
|
||||||
|
helper.log_info(f"Indexing complete. Indexed {indexed_count} new incidents to Splunk")
|
||||||
|
|
||||||
|
# --- Stage 7: Update Checkpoint ---
|
||||||
|
if indexed_alarms:
|
||||||
|
# Update checkpoint with new alarms and their statuses
|
||||||
|
processed_alarms.update(indexed_alarms)
|
||||||
|
|
||||||
|
# Save checkpoint with new format including version tracking
|
||||||
|
# No size limit - supports unlimited active alarms
|
||||||
|
checkpoint_data = {
|
||||||
|
"alarm_status": processed_alarms,
|
||||||
|
"indexed_version": "1.9.22", # Current version with TRUNCATE fix
|
||||||
|
"last_updated": datetime.now(timezone.utc).isoformat()
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
helper.save_check_point(checkpoint_key, json.dumps(checkpoint_data))
|
||||||
|
helper.log_info(f"Checkpoint updated. Total tracked alarms: {len(processed_alarms)}")
|
||||||
|
except Exception as e:
|
||||||
|
helper.log_error(f"Failed to save checkpoint: {str(e)}")
|
||||||
|
|
||||||
|
helper.log_info("SCRIPT_END: SOCRadar Incidents v4 collection complete.")
|
||||||
|
|
||||||
|
|
||||||
|
from splunklib.modularinput import Scheme
|
||||||
|
|
||||||
|
def get_scheme():
    """Return the Scheme object describing this modular input to splunkd."""
    settings = {
        "description": "Collects incidents from the SOCRadar v4 API.",
        "use_external_validation": True,
        "use_single_instance": False,
    }
    scheme = Scheme("SOCRadar Incidents Collector v4")
    for attr, value in settings.items():
        setattr(scheme, attr, value)
    return scheme
|
||||||
@ -0,0 +1,94 @@
|
|||||||
|
import ta_socradar_incidents_declare
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
|
||||||
|
import modinput_wrapper.base_modinput
|
||||||
|
from splunklib import modularinput as smi
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
import input_module_socradar_incidents_collector as input_module

# NOTE(review): os.path.basename(__file__) yields the script's FILE NAME, not
# its directory; get_global_checkbox_fields joins 'global_checkbox_param.json'
# onto it, which only resolves if the process CWD makes that relative path
# valid — confirm this is intended (os.path.dirname may be what was meant).
bin_dir = os.path.basename(__file__)

'''
Do not edit this file!!!
This file is generated by Add-on builder automatically.
Add your modular input logic to file input_module_socradar_incidents_collector.py
'''
|
||||||
|
class ModInputsocradar_incidents_collector(modinput_wrapper.base_modinput.BaseModInput):
    """Add-on Builder generated wrapper that binds the Splunk modular-input
    framework to the user logic in input_module_socradar_incidents_collector.
    """

    def __init__(self):
        # The input module may opt into single-instance mode by defining
        # use_single_instance_mode(); default is one process per input stanza.
        if 'use_single_instance_mode' in dir(input_module):
            use_single_instance = input_module.use_single_instance_mode()
        else:
            use_single_instance = False
        super(ModInputsocradar_incidents_collector, self).__init__("ta_socradar_incidents", "socradar_incidents_collector", use_single_instance)
        # Cached list of global checkbox parameter names; None = not loaded yet
        # (see get_global_checkbox_fields).
        self.global_checkbox_fields = None

    def get_scheme(self):
        """Return the modular-input scheme advertised to splunkd (overloaded
        splunklib modularinput method)."""
        scheme = super(ModInputsocradar_incidents_collector, self).get_scheme()
        scheme.title = ("SOCRadar Incidents API Collector")
        scheme.description = ("Go to the add-on\'s configuration UI and configure modular inputs under the Inputs menu.")
        scheme.use_external_validation = True
        scheme.streaming_mode_xml = True

        # "name" is the input stanza name; the only argument required on create.
        scheme.add_argument(smi.Argument("name", title="Name",
                                         description="",
                                         required_on_create=True))

        """
        For customized inputs, hard code the arguments here to hide argument detail from users.
        For other input types, arguments should be get from input_module. Defining new input types could be easier.
        """
        scheme.add_argument(smi.Argument("socradar_api_key", title="SOCRadar API Key",
                                         description="",
                                         required_on_create=False,
                                         required_on_edit=False))
        scheme.add_argument(smi.Argument("socradar_company_id", title="SOCRadar Company ID",
                                         description="",
                                         required_on_create=False,
                                         required_on_edit=False))
        return scheme

    def get_app_name(self):
        """Return the add-on (app) folder name this input belongs to."""
        return "TA-socradar-incidents"

    def validate_input(self, definition):
        """Validate the input stanza by delegating to the user input module."""
        input_module.validate_input(self, definition)

    def collect_events(self, ew):
        """Write out the events by delegating to the user input module."""
        input_module.collect_events(self, ew)

    def get_account_fields(self):
        # No account-type (credential) parameters are defined for this input.
        account_fields = []
        return account_fields

    def get_checkbox_fields(self):
        # No checkbox parameters are defined for this input.
        checkbox_fields = []
        return checkbox_fields

    def get_global_checkbox_fields(self):
        # Lazily load the global checkbox parameter names generated by
        # Add-on Builder; any failure falls back to an empty list.
        if self.global_checkbox_fields is None:
            # NOTE(review): path is joined onto module-level bin_dir; verify it
            # resolves to the add-on's bin directory at runtime.
            checkbox_name_file = os.path.join(bin_dir, 'global_checkbox_param.json')
            try:
                if os.path.isfile(checkbox_name_file):
                    with open(checkbox_name_file, 'r') as fp:
                        self.global_checkbox_fields = json.load(fp)
                else:
                    self.global_checkbox_fields = []
            except Exception as e:
                self.log_error('Get exception when loading global checkbox parameter names. ' + str(e))
                self.global_checkbox_fields = []
        return self.global_checkbox_fields
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Run the modular input and propagate its exit status to the shell.
    sys.exit(ModInputsocradar_incidents_collector().run(sys.argv))
|
||||||
@ -0,0 +1,228 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import requests
|
||||||
|
import time
|
||||||
|
import json
|
||||||
|
import subprocess
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
# Debug log file
DEBUG_LOG = "/opt/splunk/var/log/socradar/status_update_log.txt"


def log_debug(msg):
    """Append a timestamped line to the status-update debug log.

    Creates the log directory on first use so the script works on a
    freshly installed host.
    """
    log_dir = os.path.dirname(DEBUG_LOG)
    os.makedirs(log_dir, exist_ok=True)
    line = f"[{datetime.now()}] {msg}\n"
    with open(DEBUG_LOG, 'a') as handle:
        handle.write(line)
|
||||||
|
|
||||||
|
# Simple version that outputs CSV directly: Splunk's external command protocol
# reads this header line plus one result row from stdout.
print("alarm_id,status,result,message,time")

try:
    log_debug("=== NEW STATUS UPDATE REQUEST ===")

    # Read everything Splunk hands us on stdin.
    input_lines = []
    for line in sys.stdin:
        input_lines.append(line.strip())

    log_debug(f"Input lines: {input_lines}")

    # Parse alarm_id and new_status from comma-separated key=value pairs.
    alarm_id = ""
    new_status = ""

    for line in input_lines:
        if "alarm_id=" in line:
            parts = line.split(',')
            for part in parts:
                if 'alarm_id=' in part:
                    alarm_id = part.split('=', 1)[1].strip().strip('"')
                elif 'new_status=' in part:
                    new_status = part.split('=', 1)[1].strip().strip('"')

    log_debug(f"Parsed: alarm_id={alarm_id}, new_status={new_status}")

    if not alarm_id:
        # Nothing selected yet: emit a placeholder row and exit cleanly.
        print("NONE,No alarm selected,WAITING,Enter an alarm ID," + str(int(time.time())))
        sys.exit(0)

    # Get credentials by scraping the add-on's local inputs.conf.
    # NOTE(review): hard-coded Splunk home path; breaks on non-default
    # installs — consider $SPLUNK_HOME or the REST/storage-passwords API.
    conf_file = "/opt/splunk/etc/apps/TA-socradar-incidents/local/inputs.conf"
    company_id = None
    api_key = None

    if os.path.exists(conf_file):
        with open(conf_file, 'r') as f:
            for line in f:
                line = line.strip()
                if line.startswith('socradar_company_id'):
                    company_id = line.split('=', 1)[1].strip()
                elif line.startswith('socradar_api_key'):
                    api_key = line.split('=', 1)[1].strip()

    if not company_id or not api_key:
        log_debug("Missing credentials")
        print(f"{alarm_id},Error,ERROR,Missing credentials," + str(int(time.time())))
        sys.exit(0)

    # Numeric status codes (as sent by the dashboard) -> human-readable names.
    status_map = {
        '0': 'Open',
        '1': 'OnHold-Investigating',
        '2': 'Closed-Resolved',
        '3': 'OnHold-Pending',
        '5': 'OnHold-Legal',
        '9': 'Closed-FalsePositive',
        '10': 'Closed-Duplicate',
        '11': 'Closed-ProcessedInternally',
        '12': 'Closed-Mitigated',
        '13': 'Closed-NotApplicable'
    }

    status_name = status_map.get(new_status, f'Status-{new_status}')

    # Make the status-change API call.
    # NOTE(review): the API key travels in the query string and may end up in
    # proxy/server logs — confirm whether SOCRadar offers header-based auth.
    url = f"https://platform.socradar.com/api/company/{company_id}/alarms/status/change?key={api_key}"
    payload = {
        "status": new_status,
        "alarm_ids": alarm_id,
        "comments": f"Updated via Splunk to: {status_name}"
    }

    log_debug(f"Calling API: {url[:50]}...")
    log_debug(f"Payload: {payload}")

    response = requests.post(url, json=payload, headers={"Content-Type": "application/json"}, timeout=30)
    response_data = response.json()

    log_debug(f"API Response: {response.status_code} - {response_data}")

    if response.status_code == 200 and response_data.get('is_success', False):
        print(f"{alarm_id},{status_name},SUCCESS,Updated successfully," + str(int(time.time())))

        # Map the human-readable name to the status strings the collector
        # stores in its checkpoint.
        api_status_map = {
            'Open': 'OPEN',
            'OnHold-Investigating': 'INVESTIGATING',
            'Closed-Resolved': 'RESOLVED',
            'OnHold-Pending': 'PENDING_INFO',
            'OnHold-Legal': 'LEGAL',
            'Closed-FalsePositive': 'FALSE_POSITIVE',
            'Closed-Duplicate': 'DUPLICATE',
            'Closed-ProcessedInternally': 'PROCESSED_INTERNALLY',
            'Closed-Mitigated': 'MITIGATED',
            'Closed-NotApplicable': 'NOT_APPLICABLE'
        }

        api_status = api_status_map.get(status_name, status_name)

        # Update the collector's checkpoint so the next poll does not treat
        # this manual change as a fresh remote status change.
        try:
            # NOTE(review): checkpoint path is hard-coded to the input stanza
            # "deneme_002" — other stanzas will not be updated; verify.
            checkpoint_file = "/opt/splunk/var/lib/splunk/modinputs/socradar_incidents_collector/deneme_002_socradar_v4_processed_alarms"

            if os.path.exists(checkpoint_file):
                with open(checkpoint_file, 'r') as f:
                    checkpoint_data = json.load(f)

                if 'alarm_status' in checkpoint_data:
                    checkpoint_data['alarm_status'][str(alarm_id)] = api_status
                    checkpoint_data['last_updated'] = datetime.now().isoformat()

                    with open(checkpoint_file, 'w') as f:
                        json.dump(checkpoint_data, f)
                    log_debug(f"Updated checkpoint: {alarm_id} -> {api_status}")
        except Exception as e:
            log_debug(f"Checkpoint update error: {e}")

        # Index the status change immediately via the Splunk REST API so
        # dashboards reflect it without waiting for the next collector run.
        try:
            log_debug("Attempting to index status change...")

            # Build the event payload to be indexed.
            event = {
                "time": time.time(),
                "host": "socradar",
                "source": "status_update",
                "sourcetype": "socradar:incidents",
                "index": "socradar_incidents",
                "event": json.dumps({
                    "alarm_id": alarm_id,
                    "status": api_status,
                    "alarm_status": api_status,
                    "date": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    "update_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    "update_source": "manual_status_update",
                    "_time": time.time()
                })
            }

            # Use Splunk's REST "simple" receiver endpoint to index.
            import urllib.request
            import urllib.parse
            import base64

            url = "https://localhost:8089/services/receivers/simple?index=socradar_incidents&sourcetype=socradar:incidents"
            data = event['event']

            req = urllib.request.Request(
                url,
                data=data.encode('utf-8'),
                headers={
                    # NOTE(review): hard-coded admin credentials in source —
                    # move to a secure store/token and rotate this password.
                    'Authorization': 'Basic ' + base64.b64encode(b'admin:123456789').decode('ascii'),
                    'Content-Type': 'application/json'
                }
            )

            # Disable SSL verification for the localhost management port.
            # NOTE(review): CERT_NONE disables TLS validation entirely;
            # acceptable only because the target is localhost — confirm.
            import ssl
            ctx = ssl.create_default_context()
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE

            # Send the request and log the indexer's response.
            with urllib.request.urlopen(req, context=ctx) as response:
                result = response.read().decode('utf-8')
                log_debug(f"Index result: {result}")

        except Exception as e:
            log_debug(f"Indexing error: {e}")

            # Fallback: index via the splunk CLI "oneshot" command instead.
            try:
                # Minimal event mirroring what the collector would index.
                event_data = {
                    "alarm_id": alarm_id,
                    "status": api_status,
                    "alarm_status": api_status,
                    "date": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    "update_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    "update_source": "manual_status_update",
                    "_time": time.time(),
                    "alarm_type": "incident",
                    "alarm_risk_level": "UNKNOWN"
                }
                event_json = json.dumps(event_data)

                # NOTE(review): credentials duplicated here too (see above).
                cmd = ['/opt/splunk/bin/splunk', 'add', 'oneshot', '-',
                       '-sourcetype', 'socradar:incidents',
                       '-index', 'socradar_incidents',
                       '-auth', 'admin:123456789']

                proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                stdout, stderr = proc.communicate(input=event_json.encode())
                log_debug(f"Oneshot result: {proc.returncode}, stdout: {stdout}, stderr: {stderr}")
            except Exception as e2:
                log_debug(f"Oneshot error: {e2}")

    else:
        # API refused the update: report the failure row (commas would break
        # the CSV contract, so they are replaced).
        msg = response_data.get('message', 'Update failed').replace(',', ';')
        print(f"{alarm_id},{status_name},FAILED,{msg}," + str(int(time.time())))
        log_debug(f"Update failed: {msg}")

except Exception as e:
    # Last-resort handler: always emit a CSV row so Splunk gets a result.
    log_debug(f"EXCEPTION: {e}")
    print(f"ERROR,Error,ERROR,{str(e).replace(',', ';')}," + str(int(time.time())))
|
||||||
@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||||
|
of the Software, and to permit persons to whom the Software is furnished to do
|
||||||
|
so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
@ -0,0 +1,87 @@
|
|||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: Mako
|
||||||
|
Version: 1.2.4
|
||||||
|
Summary: A super-fast templating language that borrows the best ideas from the existing templating languages.
|
||||||
|
Home-page: https://www.makotemplates.org/
|
||||||
|
Author: Mike Bayer
|
||||||
|
Author-email: mike@zzzcomputing.com
|
||||||
|
License: MIT
|
||||||
|
Project-URL: Documentation, https://docs.makotemplates.org
|
||||||
|
Project-URL: Issue Tracker, https://github.com/sqlalchemy/mako
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: License :: OSI Approved :: MIT License
|
||||||
|
Classifier: Environment :: Web Environment
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
|
Classifier: Programming Language :: Python :: 3.10
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||||
|
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||||
|
Requires-Python: >=3.7
|
||||||
|
Description-Content-Type: text/x-rst
|
||||||
|
License-File: LICENSE
|
||||||
|
Requires-Dist: MarkupSafe (>=0.9.2)
|
||||||
|
Requires-Dist: importlib-metadata ; python_version < "3.8"
|
||||||
|
Provides-Extra: babel
|
||||||
|
Requires-Dist: Babel ; extra == 'babel'
|
||||||
|
Provides-Extra: lingua
|
||||||
|
Requires-Dist: lingua ; extra == 'lingua'
|
||||||
|
Provides-Extra: testing
|
||||||
|
Requires-Dist: pytest ; extra == 'testing'
|
||||||
|
|
||||||
|
=========================
|
||||||
|
Mako Templates for Python
|
||||||
|
=========================
|
||||||
|
|
||||||
|
Mako is a template library written in Python. It provides a familiar, non-XML
|
||||||
|
syntax which compiles into Python modules for maximum performance. Mako's
|
||||||
|
syntax and API borrows from the best ideas of many others, including Django
|
||||||
|
templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded
|
||||||
|
Python (i.e. Python Server Page) language, which refines the familiar ideas
|
||||||
|
of componentized layout and inheritance to produce one of the most
|
||||||
|
straightforward and flexible models available, while also maintaining close
|
||||||
|
ties to Python calling and scoping semantics.
|
||||||
|
|
||||||
|
Nutshell
|
||||||
|
========
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
<%inherit file="base.html"/>
|
||||||
|
<%
|
||||||
|
rows = [[v for v in range(0,10)] for row in range(0,10)]
|
||||||
|
%>
|
||||||
|
<table>
|
||||||
|
% for row in rows:
|
||||||
|
${makerow(row)}
|
||||||
|
% endfor
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<%def name="makerow(row)">
|
||||||
|
<tr>
|
||||||
|
% for name in row:
|
||||||
|
<td>${name}</td>\
|
||||||
|
% endfor
|
||||||
|
</tr>
|
||||||
|
</%def>
|
||||||
|
|
||||||
|
Philosophy
|
||||||
|
===========
|
||||||
|
|
||||||
|
Python is a great scripting language. Don't reinvent the wheel...your templates can handle it !
|
||||||
|
|
||||||
|
Documentation
|
||||||
|
==============
|
||||||
|
|
||||||
|
See documentation for Mako at https://docs.makotemplates.org/en/latest/
|
||||||
|
|
||||||
|
License
|
||||||
|
========
|
||||||
|
|
||||||
|
Mako is licensed under an MIT-style license (see LICENSE).
|
||||||
|
Other incorporated projects may be licensed under different licenses.
|
||||||
|
All licenses allow for non-commercial and commercial use.
|
||||||
@ -0,0 +1,42 @@
|
|||||||
|
../../bin/mako-render,sha256=jDAytBvt-We4snJ-6W-_gXavxsSTjPKgK86UFb0ZDR8,216
|
||||||
|
Mako-1.2.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
Mako-1.2.4.dist-info/LICENSE,sha256=dg8is-nqSlDrmSAb2N0RiGnygQjPtkzM5tGzBc-a6fo,1098
|
||||||
|
Mako-1.2.4.dist-info/METADATA,sha256=MlPkZcQ5bASEMtzkRaH8aRSQE6gmLH3KTnASUawz6eA,2909
|
||||||
|
Mako-1.2.4.dist-info/RECORD,,
|
||||||
|
Mako-1.2.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
Mako-1.2.4.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
|
||||||
|
Mako-1.2.4.dist-info/entry_points.txt,sha256=LsKkUsOsJQYbJ2M72hZCm968wi5K8Ywb5uFxCuN8Obk,512
|
||||||
|
Mako-1.2.4.dist-info/top_level.txt,sha256=LItdH8cDPetpUu8rUyBG3DObS6h9Gcpr9j_WLj2S-R0,5
|
||||||
|
mako/__init__.py,sha256=R1cQoVGhYA-fl43kNSPKm6kzdJOs28e8sq8WYMHctMQ,242
|
||||||
|
mako/_ast_util.py,sha256=BcwJLuE4E-aiFXi_fanO378Cn3Ou03bJxc6Incjse4Y,20247
|
||||||
|
mako/ast.py,sha256=h07xBpz2l19RSwpejrhkhgB4r5efpwGmsYOy_L8xvUc,6642
|
||||||
|
mako/cache.py,sha256=jkspun9tLgu0IVKSmo_fkL_DAbSTl2P5a5zkMBkjZvk,7680
|
||||||
|
mako/cmd.py,sha256=vQg9ip89KMsuZEGamCRAPg7UyDNlpMmnG3XHDNLHS5o,2814
|
||||||
|
mako/codegen.py,sha256=h1z8DGLkB92nbUz2OZGVmUKqPr9kVNbnNL8KnLizYAk,47309
|
||||||
|
mako/compat.py,sha256=Sa3Rzrjl44xo25nXUHbhfIrEoMgceq5-Ohl0FO6cCHk,1913
|
||||||
|
mako/exceptions.py,sha256=xQZKYdb-4d8rcrNFsFzjGSEuNG4upFqGNPErtSCDqfI,12530
|
||||||
|
mako/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
mako/ext/autohandler.py,sha256=-hNv4VHbQplLGDt5e4mFsBC-QpfWMjKokOe0axDP308,1885
|
||||||
|
mako/ext/babelplugin.py,sha256=s6ZIAh1hUhsJIiF3j4soVHrFN_1cRJ_e3sEbz7ein7k,2091
|
||||||
|
mako/ext/beaker_cache.py,sha256=D6gh_ke7QOKiSJtq9v67RvmqCRMDJx-IwTcd-NDjKvk,2578
|
||||||
|
mako/ext/extract.py,sha256=EhXglj2eW5u80T3xWWB7jMgL8oNDfAQaD5E5IRiL9N0,4659
|
||||||
|
mako/ext/linguaplugin.py,sha256=iLip2gZ0ya5pooHrxwZrP8VFQfJidXmgPZ5h1j30Kow,1935
|
||||||
|
mako/ext/preprocessors.py,sha256=pEUbmfSO2zb4DuCt_-_oYnWypWiXs4MnJHxjTMiks5A,576
|
||||||
|
mako/ext/pygmentplugin.py,sha256=GuOd93TjetzpTfW5oUEtuPS7jKDHgJIH3Faiaq76S0c,4753
|
||||||
|
mako/ext/turbogears.py,sha256=mxFDF59NFK6cm__3qwGjZ1VAW0qdjJWNj23l6dcwqEg,2141
|
||||||
|
mako/filters.py,sha256=rlHJ2L5RFr5Gf-MyOJKZI7TSJpM5oBXH58niJWCp2-4,4658
|
||||||
|
mako/lexer.py,sha256=GOHNLeSlTIEa_yV8W5Qr27SjaPlJcO0Kij7Z2rpUkCA,15982
|
||||||
|
mako/lookup.py,sha256=_2VPSA2CgCiT0Vd9GnSIjyY5wlpXiB2C5luXJP7gym8,12429
|
||||||
|
mako/parsetree.py,sha256=pXbZP0orsT3iBIgWa9yD1TEfvytsCaXu2Ttws8RTMGM,19007
|
||||||
|
mako/pygen.py,sha256=K-l_hsvXfWdMTunfHyVxvA5EG4Uzr4Qaw6IUc3hw8zI,10416
|
||||||
|
mako/pyparser.py,sha256=diSXgo-ZwdZxbRsNZ1DmARQKVnlOFc6Qgx9Dc3wZB_U,7032
|
||||||
|
mako/runtime.py,sha256=MwO5T1rGy0yLeJiFh2hh5cO_kfd5_9fJq_vfBzLFe_0,27806
|
||||||
|
mako/template.py,sha256=gEhMPjKZ1Q_sYWWg6PLnRX-KBeTF0kBnyRZimlmgQks,23858
|
||||||
|
mako/testing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
mako/testing/_config.py,sha256=k-qpnsnbXUoN-ykMN5BRpg84i1x0p6UsAddKQnrIytU,3566
|
||||||
|
mako/testing/assertions.py,sha256=XnYDPSnDFiEX9eO95OZ5LndZrUpJ6_xGofe6qDzJxqU,5162
|
||||||
|
mako/testing/config.py,sha256=wmYVZfzGvOK3mJUZpzmgO8-iIgvaCH41Woi4yDpxq6E,323
|
||||||
|
mako/testing/exclusions.py,sha256=_t6ADKdatk3f18tOfHV_ZY6u_ZwQsKphZ2MXJVSAOcI,1553
|
||||||
|
mako/testing/fixtures.py,sha256=nEp7wTusf7E0n3Q-BHJW2s_t1vx0KB9poadQ1BmIJzE,3044
|
||||||
|
mako/testing/helpers.py,sha256=kTaIg8OL1uvcuLptbRA_aJtGndIDDaxAzacYbv_Km1Q,1521
|
||||||
|
mako/util.py,sha256=XmYQmq6WfMAt-BPM7zhT9lybEqHVIWCM9wF1ukzqpew,10638
|
||||||
@ -0,0 +1,5 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.38.4)
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py3-none-any
|
||||||
|
|
||||||
@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@ -0,0 +1,28 @@
|
|||||||
|
Copyright 2010 Pallets
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are
|
||||||
|
met:
|
||||||
|
|
||||||
|
1. Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
|
||||||
|
2. Redistributions in binary form must reproduce the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer in the
|
||||||
|
documentation and/or other materials provided with the distribution.
|
||||||
|
|
||||||
|
3. Neither the name of the copyright holder nor the names of its
|
||||||
|
contributors may be used to endorse or promote products derived from
|
||||||
|
this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||||
|
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||||
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||||
|
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||||
|
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
@ -0,0 +1,101 @@
|
|||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: MarkupSafe
|
||||||
|
Version: 2.0.1
|
||||||
|
Summary: Safely add untrusted strings to HTML/XML markup.
|
||||||
|
Home-page: https://palletsprojects.com/p/markupsafe/
|
||||||
|
Author: Armin Ronacher
|
||||||
|
Author-email: armin.ronacher@active-4.com
|
||||||
|
Maintainer: Pallets
|
||||||
|
Maintainer-email: contact@palletsprojects.com
|
||||||
|
License: BSD-3-Clause
|
||||||
|
Project-URL: Donate, https://palletsprojects.com/donate
|
||||||
|
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
|
||||||
|
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
|
||||||
|
Project-URL: Source Code, https://github.com/pallets/markupsafe/
|
||||||
|
Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
|
||||||
|
Project-URL: Twitter, https://twitter.com/PalletsTeam
|
||||||
|
Project-URL: Chat, https://discord.gg/pallets
|
||||||
|
Platform: UNKNOWN
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Environment :: Web Environment
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: BSD License
|
||||||
|
Classifier: Operating System :: OS Independent
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||||
|
Classifier: Topic :: Text Processing :: Markup :: HTML
|
||||||
|
Requires-Python: >=3.6
|
||||||
|
Description-Content-Type: text/x-rst
|
||||||
|
License-File: LICENSE.rst
|
||||||
|
|
||||||
|
MarkupSafe
|
||||||
|
==========
|
||||||
|
|
||||||
|
MarkupSafe implements a text object that escapes characters so it is
|
||||||
|
safe to use in HTML and XML. Characters that have special meanings are
|
||||||
|
replaced so that they display as the actual characters. This mitigates
|
||||||
|
injection attacks, meaning untrusted user input can safely be displayed
|
||||||
|
on a page.
|
||||||
|
|
||||||
|
|
||||||
|
Installing
|
||||||
|
----------
|
||||||
|
|
||||||
|
Install and update using `pip`_:
|
||||||
|
|
||||||
|
.. code-block:: text
|
||||||
|
|
||||||
|
pip install -U MarkupSafe
|
||||||
|
|
||||||
|
.. _pip: https://pip.pypa.io/en/stable/quickstart/
|
||||||
|
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
.. code-block:: pycon
|
||||||
|
|
||||||
|
>>> from markupsafe import Markup, escape
|
||||||
|
|
||||||
|
>>> # escape replaces special characters and wraps in Markup
|
||||||
|
>>> escape("<script>alert(document.cookie);</script>")
|
||||||
|
Markup('<script>alert(document.cookie);</script>')
|
||||||
|
|
||||||
|
>>> # wrap in Markup to mark text "safe" and prevent escaping
|
||||||
|
>>> Markup("<strong>Hello</strong>")
|
||||||
|
Markup('<strong>hello</strong>')
|
||||||
|
|
||||||
|
>>> escape(Markup("<strong>Hello</strong>"))
|
||||||
|
Markup('<strong>hello</strong>')
|
||||||
|
|
||||||
|
>>> # Markup is a str subclass
|
||||||
|
>>> # methods and operators escape their arguments
|
||||||
|
>>> template = Markup("Hello <em>{name}</em>")
|
||||||
|
>>> template.format(name='"World"')
|
||||||
|
Markup('Hello <em>"World"</em>')
|
||||||
|
|
||||||
|
|
||||||
|
Donate
|
||||||
|
------
|
||||||
|
|
||||||
|
The Pallets organization develops and supports MarkupSafe and other
|
||||||
|
popular packages. In order to grow the community of contributors and
|
||||||
|
users, and allow the maintainers to devote more time to the projects,
|
||||||
|
`please donate today`_.
|
||||||
|
|
||||||
|
.. _please donate today: https://palletsprojects.com/donate
|
||||||
|
|
||||||
|
|
||||||
|
Links
|
||||||
|
-----
|
||||||
|
|
||||||
|
- Documentation: https://markupsafe.palletsprojects.com/
|
||||||
|
- Changes: https://markupsafe.palletsprojects.com/changes/
|
||||||
|
- PyPI Releases: https://pypi.org/project/MarkupSafe/
|
||||||
|
- Source Code: https://github.com/pallets/markupsafe/
|
||||||
|
- Issue Tracker: https://github.com/pallets/markupsafe/issues/
|
||||||
|
- Website: https://palletsprojects.com/p/markupsafe/
|
||||||
|
- Twitter: https://twitter.com/PalletsTeam
|
||||||
|
- Chat: https://discord.gg/pallets
|
||||||
|
|
||||||
|
|
||||||
@ -0,0 +1,13 @@
|
|||||||
|
MarkupSafe-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
MarkupSafe-2.0.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
|
||||||
|
MarkupSafe-2.0.1.dist-info/METADATA,sha256=lknelt-VPHWai5EJcvZpATGKVbXkg74h7CQuPwDS71U,3237
|
||||||
|
MarkupSafe-2.0.1.dist-info/RECORD,,
|
||||||
|
MarkupSafe-2.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
MarkupSafe-2.0.1.dist-info/WHEEL,sha256=T7Cp5xu87yB0VfKahSR3N0JT_FVycX4pq6-fNwtW39g,221
|
||||||
|
MarkupSafe-2.0.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
|
||||||
|
markupsafe/__init__.py,sha256=9Tez4UIlI7J6_sQcUFK1dKniT_b_8YefpGIyYJ3Sr2Q,8923
|
||||||
|
markupsafe/_native.py,sha256=GTKEV-bWgZuSjklhMHOYRHU9k0DMewTf5mVEZfkbuns,1986
|
||||||
|
markupsafe/_speedups.c,sha256=CDDtwaV21D2nYtypnMQzxvvpZpcTvIs8OZ6KDa1g4t0,7400
|
||||||
|
markupsafe/_speedups.cpython-37m-x86_64-linux-gnu.so,sha256=-pXHCix2zJNQJAxiYofThwxd31HBcNtF5a-bsrUgcuc,53568
|
||||||
|
markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229
|
||||||
|
markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.36.2)
|
||||||
|
Root-Is-Purelib: false
|
||||||
|
Tag: cp37-cp37m-manylinux_2_5_x86_64
|
||||||
|
Tag: cp37-cp37m-manylinux1_x86_64
|
||||||
|
Tag: cp37-cp37m-manylinux_2_12_x86_64
|
||||||
|
Tag: cp37-cp37m-manylinux2010_x86_64
|
||||||
|
|
||||||
@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@ -0,0 +1,22 @@
|
|||||||
|
Copyright 2006 Dan-Haim. All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
1. Redistributions of source code must retain the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer.
|
||||||
|
2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer in the documentation
|
||||||
|
and/or other materials provided with the distribution.
|
||||||
|
3. Neither the name of Dan Haim nor the names of his contributors may be used
|
||||||
|
to endorse or promote products derived from this software without specific
|
||||||
|
prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
|
||||||
|
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||||
|
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||||
|
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
|
||||||
|
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||||
|
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
|
||||||
|
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
|
||||||
@ -0,0 +1,321 @@
|
|||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: PySocks
|
||||||
|
Version: 1.7.1
|
||||||
|
Summary: A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information.
|
||||||
|
Home-page: https://github.com/Anorov/PySocks
|
||||||
|
Author: Anorov
|
||||||
|
Author-email: anorov.vorona@gmail.com
|
||||||
|
License: BSD
|
||||||
|
Keywords: socks,proxy
|
||||||
|
Platform: UNKNOWN
|
||||||
|
Classifier: Programming Language :: Python :: 2
|
||||||
|
Classifier: Programming Language :: Python :: 2.7
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3.4
|
||||||
|
Classifier: Programming Language :: Python :: 3.5
|
||||||
|
Classifier: Programming Language :: Python :: 3.6
|
||||||
|
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
|
||||||
|
Description-Content-Type: text/markdown
|
||||||
|
|
||||||
|
PySocks
|
||||||
|
=======
|
||||||
|
|
||||||
|
PySocks lets you send traffic through SOCKS and HTTP proxy servers. It is a modern fork of [SocksiPy](http://socksipy.sourceforge.net/) with bug fixes and extra features.
|
||||||
|
|
||||||
|
Acts as a drop-in replacement to the socket module. Seamlessly configure SOCKS proxies for any socket object by calling `socket_object.set_proxy()`.
|
||||||
|
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Features
|
||||||
|
========
|
||||||
|
|
||||||
|
* SOCKS proxy client for Python 2.7 and 3.4+
|
||||||
|
* TCP supported
|
||||||
|
* UDP mostly supported (issues may occur in some edge cases)
|
||||||
|
* HTTP proxy client included but not supported or recommended (you should use urllib2's or requests' own HTTP proxy interface)
|
||||||
|
* urllib2 handler included. `pip install` / `setup.py install` will automatically install the `sockshandler` module.
|
||||||
|
|
||||||
|
Installation
|
||||||
|
============
|
||||||
|
|
||||||
|
pip install PySocks
|
||||||
|
|
||||||
|
Or download the tarball / `git clone` and...
|
||||||
|
|
||||||
|
python setup.py install
|
||||||
|
|
||||||
|
These will install both the `socks` and `sockshandler` modules.
|
||||||
|
|
||||||
|
Alternatively, include just `socks.py` in your project.
|
||||||
|
|
||||||
|
--------------------------------------------
|
||||||
|
|
||||||
|
*Warning:* PySocks/SocksiPy only supports HTTP proxies that use CONNECT tunneling. Certain HTTP proxies may not work with this library. If you wish to use HTTP (not SOCKS) proxies, it is recommended that you rely on your HTTP client's native proxy support (`proxies` dict for `requests`, or `urllib2.ProxyHandler` for `urllib2`) instead.
|
||||||
|
|
||||||
|
--------------------------------------------
|
||||||
|
|
||||||
|
Usage
|
||||||
|
=====
|
||||||
|
|
||||||
|
## socks.socksocket ##
|
||||||
|
|
||||||
|
import socks
|
||||||
|
|
||||||
|
s = socks.socksocket() # Same API as socket.socket in the standard lib
|
||||||
|
|
||||||
|
s.set_proxy(socks.SOCKS5, "localhost") # SOCKS4 and SOCKS5 use port 1080 by default
|
||||||
|
# Or
|
||||||
|
s.set_proxy(socks.SOCKS4, "localhost", 4444)
|
||||||
|
# Or
|
||||||
|
s.set_proxy(socks.HTTP, "5.5.5.5", 8888)
|
||||||
|
|
||||||
|
# Can be treated identical to a regular socket object
|
||||||
|
s.connect(("www.somesite.com", 80))
|
||||||
|
s.sendall("GET / HTTP/1.1 ...")
|
||||||
|
print s.recv(4096)
|
||||||
|
|
||||||
|
## Monkeypatching ##
|
||||||
|
|
||||||
|
To monkeypatch the entire standard library with a single default proxy:
|
||||||
|
|
||||||
|
import urllib2
|
||||||
|
import socket
|
||||||
|
import socks
|
||||||
|
|
||||||
|
socks.set_default_proxy(socks.SOCKS5, "localhost")
|
||||||
|
socket.socket = socks.socksocket
|
||||||
|
|
||||||
|
urllib2.urlopen("http://www.somesite.com/") # All requests will pass through the SOCKS proxy
|
||||||
|
|
||||||
|
Note that monkeypatching may not work for all standard modules or for all third party modules, and generally isn't recommended. Monkeypatching is usually an anti-pattern in Python.
|
||||||
|
|
||||||
|
## urllib2 Handler ##
|
||||||
|
|
||||||
|
Example use case with the `sockshandler` urllib2 handler. Note that you must import both `socks` and `sockshandler`, as the handler is its own module separate from PySocks. The module is included in the PyPI package.
|
||||||
|
|
||||||
|
import urllib2
|
||||||
|
import socks
|
||||||
|
from sockshandler import SocksiPyHandler
|
||||||
|
|
||||||
|
opener = urllib2.build_opener(SocksiPyHandler(socks.SOCKS5, "127.0.0.1", 9050))
|
||||||
|
print opener.open("http://www.somesite.com/") # All requests made by the opener will pass through the SOCKS proxy
|
||||||
|
|
||||||
|
--------------------------------------------
|
||||||
|
|
||||||
|
Original SocksiPy README attached below, amended to reflect API changes.
|
||||||
|
|
||||||
|
--------------------------------------------
|
||||||
|
|
||||||
|
SocksiPy
|
||||||
|
|
||||||
|
A Python SOCKS module.
|
||||||
|
|
||||||
|
(C) 2006 Dan-Haim. All rights reserved.
|
||||||
|
|
||||||
|
See LICENSE file for details.
|
||||||
|
|
||||||
|
|
||||||
|
*WHAT IS A SOCKS PROXY?*
|
||||||
|
|
||||||
|
A SOCKS proxy is a proxy server at the TCP level. In other words, it acts as
|
||||||
|
a tunnel, relaying all traffic going through it without modifying it.
|
||||||
|
SOCKS proxies can be used to relay traffic using any network protocol that
|
||||||
|
uses TCP.
|
||||||
|
|
||||||
|
*WHAT IS SOCKSIPY?*
|
||||||
|
|
||||||
|
This Python module allows you to create TCP connections through a SOCKS
|
||||||
|
proxy without any special effort.
|
||||||
|
It also supports relaying UDP packets with a SOCKS5 proxy.
|
||||||
|
|
||||||
|
*PROXY COMPATIBILITY*
|
||||||
|
|
||||||
|
SocksiPy is compatible with three different types of proxies:
|
||||||
|
|
||||||
|
1. SOCKS Version 4 (SOCKS4), including the SOCKS4a extension.
|
||||||
|
2. SOCKS Version 5 (SOCKS5).
|
||||||
|
3. HTTP Proxies which support tunneling using the CONNECT method.
|
||||||
|
|
||||||
|
*SYSTEM REQUIREMENTS*
|
||||||
|
|
||||||
|
Being written in Python, SocksiPy can run on any platform that has a Python
|
||||||
|
interpreter and TCP/IP support.
|
||||||
|
This module has been tested with Python 2.3 and should work with greater versions
|
||||||
|
just as well.
|
||||||
|
|
||||||
|
|
||||||
|
INSTALLATION
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Simply copy the file "socks.py" to your Python's `lib/site-packages` directory,
|
||||||
|
and you're ready to go. [Editor's note: it is better to use `python setup.py install` for PySocks]
|
||||||
|
|
||||||
|
|
||||||
|
USAGE
|
||||||
|
------
|
||||||
|
|
||||||
|
First load the socks module with the command:
|
||||||
|
|
||||||
|
>>> import socks
|
||||||
|
>>>
|
||||||
|
|
||||||
|
The socks module provides a class called `socksocket`, which is the base to all of the module's functionality.
|
||||||
|
|
||||||
|
The `socksocket` object has the same initialization parameters as the normal socket
|
||||||
|
object to ensure maximal compatibility, however it should be noted that `socksocket` will only function with family being `AF_INET` and
|
||||||
|
type being either `SOCK_STREAM` or `SOCK_DGRAM`.
|
||||||
|
Generally, it is best to initialize the `socksocket` object with no parameters
|
||||||
|
|
||||||
|
>>> s = socks.socksocket()
|
||||||
|
>>>
|
||||||
|
|
||||||
|
The `socksocket` object has an interface which is very similiar to socket's (in fact
|
||||||
|
the `socksocket` class is derived from socket) with a few extra methods.
|
||||||
|
To select the proxy server you would like to use, use the `set_proxy` method, whose
|
||||||
|
syntax is:
|
||||||
|
|
||||||
|
set_proxy(proxy_type, addr[, port[, rdns[, username[, password]]]])
|
||||||
|
|
||||||
|
Explanation of the parameters:
|
||||||
|
|
||||||
|
`proxy_type` - The type of the proxy server. This can be one of three possible
|
||||||
|
choices: `PROXY_TYPE_SOCKS4`, `PROXY_TYPE_SOCKS5` and `PROXY_TYPE_HTTP` for SOCKS4,
|
||||||
|
SOCKS5 and HTTP servers respectively. `SOCKS4`, `SOCKS5`, and `HTTP` are all aliases, respectively.
|
||||||
|
|
||||||
|
`addr` - The IP address or DNS name of the proxy server.
|
||||||
|
|
||||||
|
`port` - The port of the proxy server. Defaults to 1080 for socks and 8080 for http.
|
||||||
|
|
||||||
|
`rdns` - This is a boolean flag than modifies the behavior regarding DNS resolving.
|
||||||
|
If it is set to True, DNS resolving will be preformed remotely, on the server.
|
||||||
|
If it is set to False, DNS resolving will be preformed locally. Please note that
|
||||||
|
setting this to True with SOCKS4 servers actually use an extension to the protocol,
|
||||||
|
called SOCKS4a, which may not be supported on all servers (SOCKS5 and http servers
|
||||||
|
always support DNS). The default is True.
|
||||||
|
|
||||||
|
`username` - For SOCKS5 servers, this allows simple username / password authentication
|
||||||
|
with the server. For SOCKS4 servers, this parameter will be sent as the userid.
|
||||||
|
This parameter is ignored if an HTTP server is being used. If it is not provided,
|
||||||
|
authentication will not be used (servers may accept unauthenticated requests).
|
||||||
|
|
||||||
|
`password` - This parameter is valid only for SOCKS5 servers and specifies the
|
||||||
|
respective password for the username provided.
|
||||||
|
|
||||||
|
Example of usage:
|
||||||
|
|
||||||
|
>>> s.set_proxy(socks.SOCKS5, "socks.example.com") # uses default port 1080
|
||||||
|
>>> s.set_proxy(socks.SOCKS4, "socks.test.com", 1081)
|
||||||
|
|
||||||
|
After the set_proxy method has been called, simply call the connect method with the
|
||||||
|
traditional parameters to establish a connection through the proxy:
|
||||||
|
|
||||||
|
>>> s.connect(("www.sourceforge.net", 80))
|
||||||
|
>>>
|
||||||
|
|
||||||
|
Connection will take a bit longer to allow negotiation with the proxy server.
|
||||||
|
Please note that calling connect without calling `set_proxy` earlier will connect
|
||||||
|
without a proxy (just like a regular socket).
|
||||||
|
|
||||||
|
Errors: Any errors in the connection process will trigger exceptions. The exception
|
||||||
|
may either be generated by the underlying socket layer or may be custom module
|
||||||
|
exceptions, whose details follow:
|
||||||
|
|
||||||
|
class `ProxyError` - This is a base exception class. It is not raised directly but
|
||||||
|
rather all other exception classes raised by this module are derived from it.
|
||||||
|
This allows an easy way to catch all proxy-related errors. It descends from `IOError`.
|
||||||
|
|
||||||
|
All `ProxyError` exceptions have an attribute `socket_err`, which will contain either a
|
||||||
|
caught `socket.error` exception, or `None` if there wasn't any.
|
||||||
|
|
||||||
|
class `GeneralProxyError` - When thrown, it indicates a problem which does not fall
|
||||||
|
into another category.
|
||||||
|
|
||||||
|
* `Sent invalid data` - This error means that unexpected data has been received from
|
||||||
|
the server. The most common reason is that the server specified as the proxy is
|
||||||
|
not really a SOCKS4/SOCKS5/HTTP proxy, or maybe the proxy type specified is wrong.
|
||||||
|
|
||||||
|
* `Connection closed unexpectedly` - The proxy server unexpectedly closed the connection.
|
||||||
|
This may indicate that the proxy server is experiencing network or software problems.
|
||||||
|
|
||||||
|
* `Bad proxy type` - This will be raised if the type of the proxy supplied to the
|
||||||
|
set_proxy function was not one of `SOCKS4`/`SOCKS5`/`HTTP`.
|
||||||
|
|
||||||
|
* `Bad input` - This will be raised if the `connect()` method is called with bad input
|
||||||
|
parameters.
|
||||||
|
|
||||||
|
class `SOCKS5AuthError` - This indicates that the connection through a SOCKS5 server
|
||||||
|
failed due to an authentication problem.
|
||||||
|
|
||||||
|
* `Authentication is required` - This will happen if you use a SOCKS5 server which
|
||||||
|
requires authentication without providing a username / password at all.
|
||||||
|
|
||||||
|
* `All offered authentication methods were rejected` - This will happen if the proxy
|
||||||
|
requires a special authentication method which is not supported by this module.
|
||||||
|
|
||||||
|
* `Unknown username or invalid password` - Self descriptive.
|
||||||
|
|
||||||
|
class `SOCKS5Error` - This will be raised for SOCKS5 errors which are not related to
|
||||||
|
authentication.
|
||||||
|
The parameter is a tuple containing a code, as given by the server,
|
||||||
|
and a description of the
|
||||||
|
error. The possible errors, according to the RFC, are:
|
||||||
|
|
||||||
|
* `0x01` - General SOCKS server failure - If for any reason the proxy server is unable to
|
||||||
|
fulfill your request (internal server error).
|
||||||
|
* `0x02` - connection not allowed by ruleset - If the address you're trying to connect to
|
||||||
|
is blacklisted on the server or requires authentication.
|
||||||
|
* `0x03` - Network unreachable - The target could not be contacted. A router on the network
|
||||||
|
had replied with a destination net unreachable error.
|
||||||
|
* `0x04` - Host unreachable - The target could not be contacted. A router on the network
|
||||||
|
had replied with a destination host unreachable error.
|
||||||
|
* `0x05` - Connection refused - The target server has actively refused the connection
|
||||||
|
(the requested port is closed).
|
||||||
|
* `0x06` - TTL expired - The TTL value of the SYN packet from the proxy to the target server
|
||||||
|
has expired. This usually means that there are network problems causing the packet
|
||||||
|
to be caught in a router-to-router "ping-pong".
|
||||||
|
* `0x07` - Command not supported - For instance if the server does not support UDP.
|
||||||
|
* `0x08` - Address type not supported - The client has provided an invalid address type.
|
||||||
|
When using this module, this error should not occur.
|
||||||
|
|
||||||
|
class `SOCKS4Error` - This will be raised for SOCKS4 errors. The parameter is a tuple
|
||||||
|
containing a code and a description of the error, as given by the server. The
|
||||||
|
possible error, according to the specification are:
|
||||||
|
|
||||||
|
* `0x5B` - Request rejected or failed - Will be raised in the event of an failure for any
|
||||||
|
reason other then the two mentioned next.
|
||||||
|
* `0x5C` - request rejected because SOCKS server cannot connect to identd on the client -
|
||||||
|
The Socks server had tried an ident lookup on your computer and has failed. In this
|
||||||
|
case you should run an identd server and/or configure your firewall to allow incoming
|
||||||
|
connections to local port 113 from the remote server.
|
||||||
|
* `0x5D` - request rejected because the client program and identd report different user-ids -
|
||||||
|
The Socks server had performed an ident lookup on your computer and has received a
|
||||||
|
different userid than the one you have provided. Change your userid (through the
|
||||||
|
username parameter of the set_proxy method) to match and try again.
|
||||||
|
|
||||||
|
class `HTTPError` - This will be raised for HTTP errors. The message will contain
|
||||||
|
the HTTP status code and provided error message.
|
||||||
|
|
||||||
|
After establishing the connection, the object behaves like a standard socket.
|
||||||
|
|
||||||
|
Methods like `makefile()` and `settimeout()` should behave just like regular sockets.
|
||||||
|
Call the `close()` method to close the connection.
|
||||||
|
|
||||||
|
In addition to the `socksocket` class, an additional function worth mentioning is the
|
||||||
|
`set_default_proxy` function. The parameters are the same as the `set_proxy` method.
|
||||||
|
This function will set default proxy settings for newly created `socksocket` objects,
|
||||||
|
in which the proxy settings haven't been changed via the `set_proxy` method.
|
||||||
|
This is quite useful if you wish to force 3rd party modules to use a SOCKS proxy,
|
||||||
|
by overriding the socket object.
|
||||||
|
For example:
|
||||||
|
|
||||||
|
>>> socks.set_default_proxy(socks.SOCKS5, "socks.example.com")
|
||||||
|
>>> socket.socket = socks.socksocket
|
||||||
|
>>> urllib.urlopen("http://www.sourceforge.net/")
|
||||||
|
|
||||||
|
|
||||||
|
PROBLEMS
|
||||||
|
---------
|
||||||
|
|
||||||
|
Please open a GitHub issue at https://github.com/Anorov/PySocks
|
||||||
|
|
||||||
|
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
PySocks-1.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
PySocks-1.7.1.dist-info/LICENSE,sha256=cCfiFOAU63i3rcwc7aWspxOnn8T2oMUsnaWz5wfm_-k,1401
|
||||||
|
PySocks-1.7.1.dist-info/METADATA,sha256=zbQMizjPOOP4DhEiEX24XXjNrYuIxF9UGUpN0uFDB6Y,13235
|
||||||
|
PySocks-1.7.1.dist-info/RECORD,,
|
||||||
|
PySocks-1.7.1.dist-info/WHEEL,sha256=t_MpApv386-8PVts2R6wsTifdIn0vbUDTVv61IbqFC8,92
|
||||||
|
PySocks-1.7.1.dist-info/top_level.txt,sha256=TKSOIfCFBoK9EY8FBYbYqC3PWd3--G15ph9n8-QHPDk,19
|
||||||
|
socks.py,sha256=xOYn27t9IGrbTBzWsUUuPa0YBuplgiUykzkOB5V5iFY,31086
|
||||||
|
sockshandler.py,sha256=2SYGj-pwt1kjgLoZAmyeaEXCeZDWRmfVS_QG6kErGtY,3966
|
||||||
@ -0,0 +1,5 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.33.3)
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py3-none-any
|
||||||
|
|
||||||
@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@ -0,0 +1,20 @@
|
|||||||
|
Copyright (c) 2017-2021 Ingy döt Net
|
||||||
|
Copyright (c) 2006-2016 Kirill Simonov
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||||
|
of the Software, and to permit persons to whom the Software is furnished to do
|
||||||
|
so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
@ -0,0 +1,46 @@
|
|||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: PyYAML
|
||||||
|
Version: 6.0.1
|
||||||
|
Summary: YAML parser and emitter for Python
|
||||||
|
Home-page: https://pyyaml.org/
|
||||||
|
Download-URL: https://pypi.org/project/PyYAML/
|
||||||
|
Author: Kirill Simonov
|
||||||
|
Author-email: xi@resolvent.net
|
||||||
|
License: MIT
|
||||||
|
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
|
||||||
|
Project-URL: CI, https://github.com/yaml/pyyaml/actions
|
||||||
|
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
|
||||||
|
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
|
||||||
|
Project-URL: Source Code, https://github.com/yaml/pyyaml
|
||||||
|
Platform: Any
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: MIT License
|
||||||
|
Classifier: Operating System :: OS Independent
|
||||||
|
Classifier: Programming Language :: Cython
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3.6
|
||||||
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
|
Classifier: Programming Language :: Python :: 3.10
|
||||||
|
Classifier: Programming Language :: Python :: 3.11
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||||
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||||
|
Classifier: Topic :: Text Processing :: Markup
|
||||||
|
Requires-Python: >=3.6
|
||||||
|
License-File: LICENSE
|
||||||
|
|
||||||
|
YAML is a data serialization format designed for human readability
|
||||||
|
and interaction with scripting languages. PyYAML is a YAML parser
|
||||||
|
and emitter for Python.
|
||||||
|
|
||||||
|
PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
|
||||||
|
support, capable extension API, and sensible error messages. PyYAML
|
||||||
|
supports standard YAML tags and provides Python-specific tags that
|
||||||
|
allow to represent an arbitrary Python object.
|
||||||
|
|
||||||
|
PyYAML is applicable for a broad range of tasks from complex
|
||||||
|
configuration files to object serialization and persistence.
|
||||||
@ -0,0 +1,26 @@
|
|||||||
|
PyYAML-6.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
PyYAML-6.0.1.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
|
||||||
|
PyYAML-6.0.1.dist-info/METADATA,sha256=UNNF8-SzzwOKXVo-kV5lXUGH2_wDWMBmGxqISpp5HQk,2058
|
||||||
|
PyYAML-6.0.1.dist-info/RECORD,,
|
||||||
|
PyYAML-6.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
PyYAML-6.0.1.dist-info/WHEEL,sha256=E0CkEo6W7NC93sVo8jj5Y2C0iOJBJ79Iyun5srY2VI0,150
|
||||||
|
PyYAML-6.0.1.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
|
||||||
|
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
|
||||||
|
yaml/__init__.py,sha256=bhl05qSeO-1ZxlSRjGrvl2m9nrXb1n9-GQatTN0Mrqc,12311
|
||||||
|
yaml/_yaml.cpython-37m-x86_64-linux-gnu.so,sha256=esho9rx9cSckIi84QOt04kVtI4LHNm_M81Sa55G-Rjg,2129400
|
||||||
|
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
|
||||||
|
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
|
||||||
|
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
|
||||||
|
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
|
||||||
|
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
|
||||||
|
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
|
||||||
|
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
|
||||||
|
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
|
||||||
|
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
|
||||||
|
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
|
||||||
|
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
|
||||||
|
yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
|
||||||
|
yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
|
||||||
|
yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
|
||||||
|
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
|
||||||
|
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
|
||||||
@ -0,0 +1,6 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.40.0)
|
||||||
|
Root-Is-Purelib: false
|
||||||
|
Tag: cp37-cp37m-manylinux_2_17_x86_64
|
||||||
|
Tag: cp37-cp37m-manylinux2014_x86_64
|
||||||
|
|
||||||
Binary file not shown.
@ -0,0 +1 @@
|
|||||||
|
__version__ = '0.19.3'
|
||||||
@ -0,0 +1,33 @@
|
|||||||
|
# This is a stub package designed to roughly emulate the _yaml
# extension module, which previously existed as a standalone module
# and has been moved into the `yaml` package namespace.
# It does not perfectly mimic its old counterpart, but should get
# close enough for anyone who's relying on it even when they shouldn't.
import yaml

# in some circumstances, the yaml module we imported may be from a different version, so we need
# to tread carefully when poking at it here (it may not have the attributes we expect)
if not getattr(yaml, '__with_libyaml__', False):
    from sys import version_info

    # ModuleNotFoundError only exists on Python 3.6+; mirror the error the
    # real missing C extension would produce on each interpreter version.
    exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
    raise exc("No module named '_yaml'")
else:
    from yaml._yaml import *
    import warnings
    warnings.warn(
        'The _yaml extension module is now located at yaml._yaml'
        ' and its location is subject to change.  To use the'
        ' LibYAML-based parser and emitter, import from `yaml`:'
        ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
        DeprecationWarning
    )
    del warnings
    # Don't `del yaml` here because yaml is actually an existing
    # namespace member of _yaml.

    __name__ = '_yaml'
    # If the module is top-level (i.e. not a part of any specific package)
    # then the attribute should be set to ''.
    # https://docs.python.org/3.8/library/types.html
    __package__ = ''
|
||||||
@ -0,0 +1,103 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
"""
|
||||||
|
Classes Without Boilerplate
|
||||||
|
"""
|
||||||
|
|
||||||
|
from functools import partial
|
||||||
|
from typing import Callable
|
||||||
|
|
||||||
|
from . import converters, exceptions, filters, setters, validators
|
||||||
|
from ._cmp import cmp_using
|
||||||
|
from ._compat import Protocol
|
||||||
|
from ._config import get_run_validators, set_run_validators
|
||||||
|
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
|
||||||
|
from ._make import (
|
||||||
|
NOTHING,
|
||||||
|
Attribute,
|
||||||
|
Converter,
|
||||||
|
Factory,
|
||||||
|
attrib,
|
||||||
|
attrs,
|
||||||
|
fields,
|
||||||
|
fields_dict,
|
||||||
|
make_class,
|
||||||
|
validate,
|
||||||
|
)
|
||||||
|
from ._next_gen import define, field, frozen, mutable
|
||||||
|
from ._version_info import VersionInfo
|
||||||
|
|
||||||
|
|
||||||
|
s = attributes = attrs
|
||||||
|
ib = attr = attrib
|
||||||
|
dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
|
||||||
|
|
||||||
|
|
||||||
|
class AttrsInstance(Protocol):
    """Empty structural marker protocol.

    Presumably satisfied by any attrs-decorated class; it exists purely
    for typing purposes and defines no members.
    """

    pass
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"Attribute",
|
||||||
|
"AttrsInstance",
|
||||||
|
"Converter",
|
||||||
|
"Factory",
|
||||||
|
"NOTHING",
|
||||||
|
"asdict",
|
||||||
|
"assoc",
|
||||||
|
"astuple",
|
||||||
|
"attr",
|
||||||
|
"attrib",
|
||||||
|
"attributes",
|
||||||
|
"attrs",
|
||||||
|
"cmp_using",
|
||||||
|
"converters",
|
||||||
|
"define",
|
||||||
|
"evolve",
|
||||||
|
"exceptions",
|
||||||
|
"field",
|
||||||
|
"fields",
|
||||||
|
"fields_dict",
|
||||||
|
"filters",
|
||||||
|
"frozen",
|
||||||
|
"get_run_validators",
|
||||||
|
"has",
|
||||||
|
"ib",
|
||||||
|
"make_class",
|
||||||
|
"mutable",
|
||||||
|
"resolve_types",
|
||||||
|
"s",
|
||||||
|
"set_run_validators",
|
||||||
|
"setters",
|
||||||
|
"validate",
|
||||||
|
"validators",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def _make_getattr(mod_name: str) -> Callable:
|
||||||
|
"""
|
||||||
|
Create a metadata proxy for packaging information that uses *mod_name* in
|
||||||
|
its warnings and errors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __getattr__(name: str) -> str:
|
||||||
|
if name not in ("__version__", "__version_info__"):
|
||||||
|
msg = f"module {mod_name} has no attribute {name}"
|
||||||
|
raise AttributeError(msg)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from importlib.metadata import metadata
|
||||||
|
except ImportError:
|
||||||
|
from importlib_metadata import metadata
|
||||||
|
|
||||||
|
meta = metadata("attrs")
|
||||||
|
|
||||||
|
if name == "__version_info__":
|
||||||
|
return VersionInfo._from_version_string(meta["version"])
|
||||||
|
|
||||||
|
return meta["version"]
|
||||||
|
|
||||||
|
return __getattr__
|
||||||
|
|
||||||
|
|
||||||
|
__getattr__ = _make_getattr(__name__)
|
||||||
@ -0,0 +1,388 @@
|
|||||||
|
import enum
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Callable,
|
||||||
|
Generic,
|
||||||
|
Mapping,
|
||||||
|
Protocol,
|
||||||
|
Sequence,
|
||||||
|
TypeVar,
|
||||||
|
overload,
|
||||||
|
)
|
||||||
|
|
||||||
|
# `import X as X` is required to make these public
|
||||||
|
from . import converters as converters
|
||||||
|
from . import exceptions as exceptions
|
||||||
|
from . import filters as filters
|
||||||
|
from . import setters as setters
|
||||||
|
from . import validators as validators
|
||||||
|
from ._cmp import cmp_using as cmp_using
|
||||||
|
from ._typing_compat import AttrsInstance_
|
||||||
|
from ._version_info import VersionInfo
|
||||||
|
from attrs import (
|
||||||
|
define as define,
|
||||||
|
field as field,
|
||||||
|
mutable as mutable,
|
||||||
|
frozen as frozen,
|
||||||
|
_EqOrderType,
|
||||||
|
_ValidatorType,
|
||||||
|
_ConverterType,
|
||||||
|
_ReprArgType,
|
||||||
|
_OnSetAttrType,
|
||||||
|
_OnSetAttrArgType,
|
||||||
|
_FieldTransformer,
|
||||||
|
_ValidatorArgType,
|
||||||
|
)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 10):
|
||||||
|
from typing import TypeGuard
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeGuard
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import dataclass_transform
|
||||||
|
else:
|
||||||
|
from typing_extensions import dataclass_transform
|
||||||
|
|
||||||
|
__version__: str
|
||||||
|
__version_info__: VersionInfo
|
||||||
|
__title__: str
|
||||||
|
__description__: str
|
||||||
|
__url__: str
|
||||||
|
__uri__: str
|
||||||
|
__author__: str
|
||||||
|
__email__: str
|
||||||
|
__license__: str
|
||||||
|
__copyright__: str
|
||||||
|
|
||||||
|
_T = TypeVar("_T")
|
||||||
|
_C = TypeVar("_C", bound=type)
|
||||||
|
|
||||||
|
_FilterType = Callable[["Attribute[_T]", _T], bool]
|
||||||
|
|
||||||
|
# We subclass this here to keep the protocol's qualified name clean.
|
||||||
|
class AttrsInstance(AttrsInstance_, Protocol):
|
||||||
|
pass
|
||||||
|
|
||||||
|
_A = TypeVar("_A", bound=type[AttrsInstance])
|
||||||
|
|
||||||
|
class _Nothing(enum.Enum):
|
||||||
|
NOTHING = enum.auto()
|
||||||
|
|
||||||
|
NOTHING = _Nothing.NOTHING
|
||||||
|
|
||||||
|
# NOTE: Factory lies about its return type to make this possible:
|
||||||
|
# `x: List[int] # = Factory(list)`
|
||||||
|
# Work around mypy issue #4554 in the common case by using an overload.
|
||||||
|
if sys.version_info >= (3, 8):
    from typing import Literal
    @overload
    def Factory(factory: Callable[[], _T]) -> _T: ...
    @overload
    def Factory(
        factory: Callable[[Any], _T],
        takes_self: Literal[True],
    ) -> _T: ...
    @overload
    def Factory(
        factory: Callable[[], _T],
        takes_self: Literal[False],
    ) -> _T: ...

else:
    @overload
    def Factory(factory: Callable[[], _T]) -> _T: ...
    @overload
    def Factory(
        # `Union` was used here without being imported anywhere in this
        # stub; use the `X | Y` form already used throughout the file.
        factory: Callable[[Any], _T] | Callable[[], _T],
        takes_self: bool = ...,
    ) -> _T: ...
|
||||||
|
|
||||||
|
In = TypeVar("In")
|
||||||
|
Out = TypeVar("Out")
|
||||||
|
|
||||||
|
class Converter(Generic[In, Out]):
|
||||||
|
@overload
|
||||||
|
def __init__(self, converter: Callable[[In], Out]) -> None: ...
|
||||||
|
@overload
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
converter: Callable[[In, AttrsInstance, Attribute], Out],
|
||||||
|
*,
|
||||||
|
takes_self: Literal[True],
|
||||||
|
takes_field: Literal[True],
|
||||||
|
) -> None: ...
|
||||||
|
@overload
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
converter: Callable[[In, Attribute], Out],
|
||||||
|
*,
|
||||||
|
takes_field: Literal[True],
|
||||||
|
) -> None: ...
|
||||||
|
@overload
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
converter: Callable[[In, AttrsInstance], Out],
|
||||||
|
*,
|
||||||
|
takes_self: Literal[True],
|
||||||
|
) -> None: ...
|
||||||
|
|
||||||
|
class Attribute(Generic[_T]):
|
||||||
|
name: str
|
||||||
|
default: _T | None
|
||||||
|
validator: _ValidatorType[_T] | None
|
||||||
|
repr: _ReprArgType
|
||||||
|
cmp: _EqOrderType
|
||||||
|
eq: _EqOrderType
|
||||||
|
order: _EqOrderType
|
||||||
|
hash: bool | None
|
||||||
|
init: bool
|
||||||
|
converter: _ConverterType | Converter[Any, _T] | None
|
||||||
|
metadata: dict[Any, Any]
|
||||||
|
type: type[_T] | None
|
||||||
|
kw_only: bool
|
||||||
|
on_setattr: _OnSetAttrType
|
||||||
|
alias: str | None
|
||||||
|
|
||||||
|
def evolve(self, **changes: Any) -> "Attribute[Any]": ...
|
||||||
|
|
||||||
|
# NOTE: We had several choices for the annotation to use for type arg:
|
||||||
|
# 1) Type[_T]
|
||||||
|
# - Pros: Handles simple cases correctly
|
||||||
|
# - Cons: Might produce less informative errors in the case of conflicting
|
||||||
|
# TypeVars e.g. `attr.ib(default='bad', type=int)`
|
||||||
|
# 2) Callable[..., _T]
|
||||||
|
# - Pros: Better error messages than #1 for conflicting TypeVars
|
||||||
|
# - Cons: Terrible error messages for validator checks.
|
||||||
|
# e.g. attr.ib(type=int, validator=validate_str)
|
||||||
|
# -> error: Cannot infer function type argument
|
||||||
|
# 3) type (and do all of the work in the mypy plugin)
|
||||||
|
# - Pros: Simple here, and we could customize the plugin with our own errors.
|
||||||
|
# - Cons: Would need to write mypy plugin code to handle all the cases.
|
||||||
|
# We chose option #1.
|
||||||
|
|
||||||
|
# `attr` lies about its return type to make the following possible:
|
||||||
|
# attr() -> Any
|
||||||
|
# attr(8) -> int
|
||||||
|
# attr(validator=<some callable>) -> Whatever the callable expects.
|
||||||
|
# This makes this type of assignments possible:
|
||||||
|
# x: int = attr(8)
|
||||||
|
#
|
||||||
|
# This form catches explicit None or no default but with no other arguments
|
||||||
|
# returns Any.
|
||||||
|
@overload
|
||||||
|
def attrib(
|
||||||
|
default: None = ...,
|
||||||
|
validator: None = ...,
|
||||||
|
repr: _ReprArgType = ...,
|
||||||
|
cmp: _EqOrderType | None = ...,
|
||||||
|
hash: bool | None = ...,
|
||||||
|
init: bool = ...,
|
||||||
|
metadata: Mapping[Any, Any] | None = ...,
|
||||||
|
type: None = ...,
|
||||||
|
converter: None = ...,
|
||||||
|
factory: None = ...,
|
||||||
|
kw_only: bool = ...,
|
||||||
|
eq: _EqOrderType | None = ...,
|
||||||
|
order: _EqOrderType | None = ...,
|
||||||
|
on_setattr: _OnSetAttrArgType | None = ...,
|
||||||
|
alias: str | None = ...,
|
||||||
|
) -> Any: ...
|
||||||
|
|
||||||
|
# This form catches an explicit None or no default and infers the type from the
|
||||||
|
# other arguments.
|
||||||
|
@overload
|
||||||
|
def attrib(
|
||||||
|
default: None = ...,
|
||||||
|
validator: _ValidatorArgType[_T] | None = ...,
|
||||||
|
repr: _ReprArgType = ...,
|
||||||
|
cmp: _EqOrderType | None = ...,
|
||||||
|
hash: bool | None = ...,
|
||||||
|
init: bool = ...,
|
||||||
|
metadata: Mapping[Any, Any] | None = ...,
|
||||||
|
type: type[_T] | None = ...,
|
||||||
|
converter: _ConverterType | Converter[Any, _T] | None = ...,
|
||||||
|
factory: Callable[[], _T] | None = ...,
|
||||||
|
kw_only: bool = ...,
|
||||||
|
eq: _EqOrderType | None = ...,
|
||||||
|
order: _EqOrderType | None = ...,
|
||||||
|
on_setattr: _OnSetAttrArgType | None = ...,
|
||||||
|
alias: str | None = ...,
|
||||||
|
) -> _T: ...
|
||||||
|
|
||||||
|
# This form catches an explicit default argument.
|
||||||
|
@overload
|
||||||
|
def attrib(
|
||||||
|
default: _T,
|
||||||
|
validator: _ValidatorArgType[_T] | None = ...,
|
||||||
|
repr: _ReprArgType = ...,
|
||||||
|
cmp: _EqOrderType | None = ...,
|
||||||
|
hash: bool | None = ...,
|
||||||
|
init: bool = ...,
|
||||||
|
metadata: Mapping[Any, Any] | None = ...,
|
||||||
|
type: type[_T] | None = ...,
|
||||||
|
converter: _ConverterType | Converter[Any, _T] | None = ...,
|
||||||
|
factory: Callable[[], _T] | None = ...,
|
||||||
|
kw_only: bool = ...,
|
||||||
|
eq: _EqOrderType | None = ...,
|
||||||
|
order: _EqOrderType | None = ...,
|
||||||
|
on_setattr: _OnSetAttrArgType | None = ...,
|
||||||
|
alias: str | None = ...,
|
||||||
|
) -> _T: ...
|
||||||
|
|
||||||
|
# This form covers type=non-Type: e.g. forward references (str), Any
|
||||||
|
@overload
|
||||||
|
def attrib(
|
||||||
|
default: _T | None = ...,
|
||||||
|
validator: _ValidatorArgType[_T] | None = ...,
|
||||||
|
repr: _ReprArgType = ...,
|
||||||
|
cmp: _EqOrderType | None = ...,
|
||||||
|
hash: bool | None = ...,
|
||||||
|
init: bool = ...,
|
||||||
|
metadata: Mapping[Any, Any] | None = ...,
|
||||||
|
type: object = ...,
|
||||||
|
converter: _ConverterType | Converter[Any, _T] | None = ...,
|
||||||
|
factory: Callable[[], _T] | None = ...,
|
||||||
|
kw_only: bool = ...,
|
||||||
|
eq: _EqOrderType | None = ...,
|
||||||
|
order: _EqOrderType | None = ...,
|
||||||
|
on_setattr: _OnSetAttrArgType | None = ...,
|
||||||
|
alias: str | None = ...,
|
||||||
|
) -> Any: ...
|
||||||
|
@overload
|
||||||
|
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
|
||||||
|
def attrs(
|
||||||
|
maybe_cls: _C,
|
||||||
|
these: dict[str, Any] | None = ...,
|
||||||
|
repr_ns: str | None = ...,
|
||||||
|
repr: bool = ...,
|
||||||
|
cmp: _EqOrderType | None = ...,
|
||||||
|
hash: bool | None = ...,
|
||||||
|
init: bool = ...,
|
||||||
|
slots: bool = ...,
|
||||||
|
frozen: bool = ...,
|
||||||
|
weakref_slot: bool = ...,
|
||||||
|
str: bool = ...,
|
||||||
|
auto_attribs: bool = ...,
|
||||||
|
kw_only: bool = ...,
|
||||||
|
cache_hash: bool = ...,
|
||||||
|
auto_exc: bool = ...,
|
||||||
|
eq: _EqOrderType | None = ...,
|
||||||
|
order: _EqOrderType | None = ...,
|
||||||
|
auto_detect: bool = ...,
|
||||||
|
collect_by_mro: bool = ...,
|
||||||
|
getstate_setstate: bool | None = ...,
|
||||||
|
on_setattr: _OnSetAttrArgType | None = ...,
|
||||||
|
field_transformer: _FieldTransformer | None = ...,
|
||||||
|
match_args: bool = ...,
|
||||||
|
unsafe_hash: bool | None = ...,
|
||||||
|
) -> _C: ...
|
||||||
|
@overload
|
||||||
|
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
|
||||||
|
def attrs(
|
||||||
|
maybe_cls: None = ...,
|
||||||
|
these: dict[str, Any] | None = ...,
|
||||||
|
repr_ns: str | None = ...,
|
||||||
|
repr: bool = ...,
|
||||||
|
cmp: _EqOrderType | None = ...,
|
||||||
|
hash: bool | None = ...,
|
||||||
|
init: bool = ...,
|
||||||
|
slots: bool = ...,
|
||||||
|
frozen: bool = ...,
|
||||||
|
weakref_slot: bool = ...,
|
||||||
|
str: bool = ...,
|
||||||
|
auto_attribs: bool = ...,
|
||||||
|
kw_only: bool = ...,
|
||||||
|
cache_hash: bool = ...,
|
||||||
|
auto_exc: bool = ...,
|
||||||
|
eq: _EqOrderType | None = ...,
|
||||||
|
order: _EqOrderType | None = ...,
|
||||||
|
auto_detect: bool = ...,
|
||||||
|
collect_by_mro: bool = ...,
|
||||||
|
getstate_setstate: bool | None = ...,
|
||||||
|
on_setattr: _OnSetAttrArgType | None = ...,
|
||||||
|
field_transformer: _FieldTransformer | None = ...,
|
||||||
|
match_args: bool = ...,
|
||||||
|
unsafe_hash: bool | None = ...,
|
||||||
|
) -> Callable[[_C], _C]: ...
|
||||||
|
def fields(cls: type[AttrsInstance]) -> Any: ...
|
||||||
|
def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ...
|
||||||
|
def validate(inst: AttrsInstance) -> None: ...
|
||||||
|
def resolve_types(
|
||||||
|
cls: _A,
|
||||||
|
globalns: dict[str, Any] | None = ...,
|
||||||
|
localns: dict[str, Any] | None = ...,
|
||||||
|
attribs: list[Attribute[Any]] | None = ...,
|
||||||
|
include_extras: bool = ...,
|
||||||
|
) -> _A: ...
|
||||||
|
|
||||||
|
# TODO: add support for returning a proper attrs class from the mypy plugin
|
||||||
|
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
|
||||||
|
# [attr.ib()])` is valid
|
||||||
|
def make_class(
|
||||||
|
name: str,
|
||||||
|
attrs: list[str] | tuple[str, ...] | dict[str, Any],
|
||||||
|
bases: tuple[type, ...] = ...,
|
||||||
|
class_body: dict[str, Any] | None = ...,
|
||||||
|
repr_ns: str | None = ...,
|
||||||
|
repr: bool = ...,
|
||||||
|
cmp: _EqOrderType | None = ...,
|
||||||
|
hash: bool | None = ...,
|
||||||
|
init: bool = ...,
|
||||||
|
slots: bool = ...,
|
||||||
|
frozen: bool = ...,
|
||||||
|
weakref_slot: bool = ...,
|
||||||
|
str: bool = ...,
|
||||||
|
auto_attribs: bool = ...,
|
||||||
|
kw_only: bool = ...,
|
||||||
|
cache_hash: bool = ...,
|
||||||
|
auto_exc: bool = ...,
|
||||||
|
eq: _EqOrderType | None = ...,
|
||||||
|
order: _EqOrderType | None = ...,
|
||||||
|
collect_by_mro: bool = ...,
|
||||||
|
on_setattr: _OnSetAttrArgType | None = ...,
|
||||||
|
field_transformer: _FieldTransformer | None = ...,
|
||||||
|
) -> type: ...
|
||||||
|
|
||||||
|
# _funcs --
|
||||||
|
|
||||||
|
# TODO: add support for returning TypedDict from the mypy plugin
|
||||||
|
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
|
||||||
|
# these:
|
||||||
|
# https://github.com/python/mypy/issues/4236
|
||||||
|
# https://github.com/python/typing/issues/253
|
||||||
|
# XXX: remember to fix attrs.asdict/astuple too!
|
||||||
|
def asdict(
|
||||||
|
inst: AttrsInstance,
|
||||||
|
recurse: bool = ...,
|
||||||
|
filter: _FilterType[Any] | None = ...,
|
||||||
|
dict_factory: type[Mapping[Any, Any]] = ...,
|
||||||
|
retain_collection_types: bool = ...,
|
||||||
|
value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ...,
|
||||||
|
tuple_keys: bool | None = ...,
|
||||||
|
) -> dict[str, Any]: ...
|
||||||
|
|
||||||
|
# TODO: add support for returning NamedTuple from the mypy plugin
|
||||||
|
def astuple(
|
||||||
|
inst: AttrsInstance,
|
||||||
|
recurse: bool = ...,
|
||||||
|
filter: _FilterType[Any] | None = ...,
|
||||||
|
tuple_factory: type[Sequence[Any]] = ...,
|
||||||
|
retain_collection_types: bool = ...,
|
||||||
|
) -> tuple[Any, ...]: ...
|
||||||
|
def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ...
|
||||||
|
def assoc(inst: _T, **changes: Any) -> _T: ...
|
||||||
|
def evolve(inst: _T, **changes: Any) -> _T: ...
|
||||||
|
|
||||||
|
# _config --
|
||||||
|
|
||||||
|
def set_run_validators(run: bool) -> None: ...
|
||||||
|
def get_run_validators() -> bool: ...
|
||||||
|
|
||||||
|
# aliases --
|
||||||
|
|
||||||
|
s = attributes = attrs
|
||||||
|
ib = attr = attrib
|
||||||
|
dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
|
||||||
@ -0,0 +1,160 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import types
|
||||||
|
|
||||||
|
from ._make import _make_ne
|
||||||
|
|
||||||
|
|
||||||
|
_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
|
||||||
|
|
||||||
|
|
||||||
|
def cmp_using(
|
||||||
|
eq=None,
|
||||||
|
lt=None,
|
||||||
|
le=None,
|
||||||
|
gt=None,
|
||||||
|
ge=None,
|
||||||
|
require_same_type=True,
|
||||||
|
class_name="Comparable",
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
|
||||||
|
and ``cmp`` arguments to customize field comparison.
|
||||||
|
|
||||||
|
The resulting class will have a full set of ordering methods if at least
|
||||||
|
one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
eq (typing.Callable | None):
|
||||||
|
Callable used to evaluate equality of two objects.
|
||||||
|
|
||||||
|
lt (typing.Callable | None):
|
||||||
|
Callable used to evaluate whether one object is less than another
|
||||||
|
object.
|
||||||
|
|
||||||
|
le (typing.Callable | None):
|
||||||
|
Callable used to evaluate whether one object is less than or equal
|
||||||
|
to another object.
|
||||||
|
|
||||||
|
gt (typing.Callable | None):
|
||||||
|
Callable used to evaluate whether one object is greater than
|
||||||
|
another object.
|
||||||
|
|
||||||
|
ge (typing.Callable | None):
|
||||||
|
Callable used to evaluate whether one object is greater than or
|
||||||
|
equal to another object.
|
||||||
|
|
||||||
|
require_same_type (bool):
|
||||||
|
When `True`, equality and ordering methods will return
|
||||||
|
`NotImplemented` if objects are not of the same type.
|
||||||
|
|
||||||
|
class_name (str | None): Name of class. Defaults to "Comparable".
|
||||||
|
|
||||||
|
See `comparison` for more details.
|
||||||
|
|
||||||
|
.. versionadded:: 21.1.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
body = {
|
||||||
|
"__slots__": ["value"],
|
||||||
|
"__init__": _make_init(),
|
||||||
|
"_requirements": [],
|
||||||
|
"_is_comparable_to": _is_comparable_to,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add operations.
|
||||||
|
num_order_functions = 0
|
||||||
|
has_eq_function = False
|
||||||
|
|
||||||
|
if eq is not None:
|
||||||
|
has_eq_function = True
|
||||||
|
body["__eq__"] = _make_operator("eq", eq)
|
||||||
|
body["__ne__"] = _make_ne()
|
||||||
|
|
||||||
|
if lt is not None:
|
||||||
|
num_order_functions += 1
|
||||||
|
body["__lt__"] = _make_operator("lt", lt)
|
||||||
|
|
||||||
|
if le is not None:
|
||||||
|
num_order_functions += 1
|
||||||
|
body["__le__"] = _make_operator("le", le)
|
||||||
|
|
||||||
|
if gt is not None:
|
||||||
|
num_order_functions += 1
|
||||||
|
body["__gt__"] = _make_operator("gt", gt)
|
||||||
|
|
||||||
|
if ge is not None:
|
||||||
|
num_order_functions += 1
|
||||||
|
body["__ge__"] = _make_operator("ge", ge)
|
||||||
|
|
||||||
|
type_ = types.new_class(
|
||||||
|
class_name, (object,), {}, lambda ns: ns.update(body)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add same type requirement.
|
||||||
|
if require_same_type:
|
||||||
|
type_._requirements.append(_check_same_type)
|
||||||
|
|
||||||
|
# Add total ordering if at least one operation was defined.
|
||||||
|
if 0 < num_order_functions < 4:
|
||||||
|
if not has_eq_function:
|
||||||
|
# functools.total_ordering requires __eq__ to be defined,
|
||||||
|
# so raise early error here to keep a nice stack.
|
||||||
|
msg = "eq must be define is order to complete ordering from lt, le, gt, ge."
|
||||||
|
raise ValueError(msg)
|
||||||
|
type_ = functools.total_ordering(type_)
|
||||||
|
|
||||||
|
return type_
|
||||||
|
|
||||||
|
|
||||||
|
def _make_init():
|
||||||
|
"""
|
||||||
|
Create __init__ method.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, value):
|
||||||
|
"""
|
||||||
|
Initialize object with *value*.
|
||||||
|
"""
|
||||||
|
self.value = value
|
||||||
|
|
||||||
|
return __init__
|
||||||
|
|
||||||
|
|
||||||
|
def _make_operator(name, func):
    """
    Wrap comparison callable *func* into a rich-comparison method.

    The result is suitable for use as ``__<name>__`` on a `cmp_using`
    generated class: it defers to the instance's ``_is_comparable_to``
    hook and returns `NotImplemented` whenever the operands cannot be
    compared.
    """

    def method(self, other):
        # Refuse the comparison unless every registered requirement on the
        # type accepts *other*; Python then falls back to the reflected op.
        if not self._is_comparable_to(other):
            return NotImplemented

        outcome = func(self.value, other.value)
        return NotImplemented if outcome is NotImplemented else outcome

    method.__name__ = f"__{name}__"
    method.__doc__ = (
        f"Return a {_operation_names[name]} b. Computed by attrs."
    )

    return method
|
||||||
|
|
||||||
|
|
||||||
|
def _is_comparable_to(self, other):
|
||||||
|
"""
|
||||||
|
Check whether `other` is comparable to `self`.
|
||||||
|
"""
|
||||||
|
return all(func(self, other) for func in self._requirements)
|
||||||
|
|
||||||
|
|
||||||
|
def _check_same_type(self, other):
|
||||||
|
"""
|
||||||
|
Return True if *self* and *other* are of the same type, False otherwise.
|
||||||
|
"""
|
||||||
|
return other.value.__class__ is self.value.__class__
|
||||||
@ -0,0 +1,13 @@
|
|||||||
|
from typing import Any, Callable

# Type of the user-supplied comparison callbacks: ``(a, b) -> bool``.
_CompareWithType = Callable[[Any, Any], bool]

# Stub for attr._cmp.cmp_using: builds a class whose rich-comparison
# dunders delegate to the given callables.
def cmp_using(
    eq: _CompareWithType | None = ...,
    lt: _CompareWithType | None = ...,
    le: _CompareWithType | None = ...,
    gt: _CompareWithType | None = ...,
    ge: _CompareWithType | None = ...,
    require_same_type: bool = ...,
    class_name: str = ...,
) -> type: ...
|
||||||
@ -0,0 +1,103 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
import inspect
|
||||||
|
import platform
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
|
||||||
|
from collections.abc import Mapping, Sequence # noqa: F401
|
||||||
|
from typing import _GenericAlias
|
||||||
|
|
||||||
|
|
||||||
|
PYPY = platform.python_implementation() == "PyPy"
|
||||||
|
PY_3_8_PLUS = sys.version_info[:2] >= (3, 8)
|
||||||
|
PY_3_9_PLUS = sys.version_info[:2] >= (3, 9)
|
||||||
|
PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
|
||||||
|
PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
|
||||||
|
PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
|
||||||
|
PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
|
||||||
|
PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
|
||||||
|
|
||||||
|
|
||||||
|
# Python < 3.8 has no typing.Protocol: prefer the typing_extensions
# backport, and degrade to plain ``object`` when that isn't installed.
if sys.version_info < (3, 8):
    try:
        from typing_extensions import Protocol
    except ImportError:  # pragma: no cover
        Protocol = object
else:
    from typing import Protocol  # noqa: F401

if PY_3_14_PLUS:  # pragma: no cover
    import annotationlib

    # 3.14+ ships annotationlib; delegate to its get_annotations.
    _get_annotations = annotationlib.get_annotations

else:

    def _get_annotations(cls):
        """
        Get annotations for *cls*.

        Only *cls*'s own ``__dict__`` is consulted -- annotations inherited
        from base classes are deliberately not returned.
        """
        return cls.__dict__.get("__annotations__", {})
|
||||||
|
|
||||||
|
|
||||||
|
class _AnnotationExtractor:
|
||||||
|
"""
|
||||||
|
Extract type annotations from a callable, returning None whenever there
|
||||||
|
is none.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__slots__ = ["sig"]
|
||||||
|
|
||||||
|
def __init__(self, callable):
|
||||||
|
try:
|
||||||
|
self.sig = inspect.signature(callable)
|
||||||
|
except (ValueError, TypeError): # inspect failed
|
||||||
|
self.sig = None
|
||||||
|
|
||||||
|
def get_first_param_type(self):
|
||||||
|
"""
|
||||||
|
Return the type annotation of the first argument if it's not empty.
|
||||||
|
"""
|
||||||
|
if not self.sig:
|
||||||
|
return None
|
||||||
|
|
||||||
|
params = list(self.sig.parameters.values())
|
||||||
|
if params and params[0].annotation is not inspect.Parameter.empty:
|
||||||
|
return params[0].annotation
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_return_type(self):
|
||||||
|
"""
|
||||||
|
Return the return type if it's not empty.
|
||||||
|
"""
|
||||||
|
if (
|
||||||
|
self.sig
|
||||||
|
and self.sig.return_annotation is not inspect.Signature.empty
|
||||||
|
):
|
||||||
|
return self.sig.return_annotation
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
# Thread-local global to track attrs instances which are already being repr'd.
|
||||||
|
# This is needed because there is no other (thread-safe) way to pass info
|
||||||
|
# about the instances that are already being repr'd through the call stack
|
||||||
|
# in order to ensure we don't perform infinite recursion.
|
||||||
|
#
|
||||||
|
# For instance, if an instance contains a dict which contains that instance,
|
||||||
|
# we need to know that we're already repr'ing the outside instance from within
|
||||||
|
# the dict's repr() call.
|
||||||
|
#
|
||||||
|
# This lives here rather than in _make.py so that the functions in _make.py
|
||||||
|
# don't have a direct reference to the thread-local in their globals dict.
|
||||||
|
# If they have such a reference, it breaks cloudpickle.
|
||||||
|
repr_context = threading.local()
|
||||||
|
|
||||||
|
|
||||||
|
def get_generic_base(cl):
    """
    Return the unsubscripted origin of a generic alias such as ``A[str]``.

    Only objects whose class is exactly ``typing._GenericAlias`` are
    recognized; everything else (including plain classes) yields None.
    """
    return cl.__origin__ if cl.__class__ is _GenericAlias else None
|
||||||
@ -0,0 +1,31 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
__all__ = ["set_run_validators", "get_run_validators"]

# Module-level switch read by validator machinery; mutated only through
# set_run_validators() below. Validators run by default.
_run_validators = True
|
||||||
|
|
||||||
|
|
||||||
|
def set_run_validators(run):
    """
    Set whether or not validators are run. By default, they are run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
        instead.
    """
    global _run_validators

    # Reject truthy non-bools explicitly instead of silently coercing.
    if not isinstance(run, bool):
        msg = "'run' must be bool."
        raise TypeError(msg)

    _run_validators = run
|
||||||
|
|
||||||
|
|
||||||
|
def get_run_validators():
    """
    Return whether or not validators are run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
        instead.
    """
    # Read the module-level switch toggled by set_run_validators().
    return _run_validators
|
||||||
@ -0,0 +1,522 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
|
||||||
|
import copy
|
||||||
|
|
||||||
|
from ._compat import PY_3_9_PLUS, get_generic_base
|
||||||
|
from ._make import _OBJ_SETATTR, NOTHING, fields
|
||||||
|
from .exceptions import AttrsAttributeNotFoundError
|
||||||
|
|
||||||
|
|
||||||
|
def asdict(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
    value_serializer=None,
):
    """
    Return the *attrs* attribute values of *inst* as a dict.

    Optionally recurse into other *attrs*-decorated classes.

    Args:
        inst: Instance of an *attrs*-decorated class.

        recurse (bool): Recurse into classes that are also *attrs*-decorated.

        filter (~typing.Callable):
            A callable whose return code determines whether an attribute or
            element is included (`True`) or dropped (`False`). Is called with
            the `attrs.Attribute` as the first argument and the value as the
            second argument.

        dict_factory (~typing.Callable):
            A callable to produce dictionaries from. For example, to produce
            ordered dictionaries instead of normal Python dictionaries, pass in
            ``collections.OrderedDict``.

        retain_collection_types (bool):
            Do not convert to `list` when encountering an attribute whose type
            is `tuple` or `set`. Only meaningful if *recurse* is `True`.

        value_serializer (typing.Callable | None):
            A hook that is called for every attribute or dict key/value. It
            receives the current instance, field and value and must return the
            (updated) value. The hook is run *after* the optional *filter* has
            been applied.

    Returns:
        Return type of *dict_factory*.

    Raises:
        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    .. versionadded:: 20.3.0 *value_serializer*
    .. versionadded:: 21.3.0
        If a dict has a collection for a key, it is serialized as a tuple.
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
    for a in attrs:
        v = getattr(inst, a.name)
        # Filter runs on the raw value, before serialization.
        if filter is not None and not filter(a, v):
            continue

        if value_serializer is not None:
            v = value_serializer(inst, a, v)

        if recurse is True:
            if has(v.__class__):
                # Nested attrs instance: recurse with the same options.
                rv[a.name] = asdict(
                    v,
                    recurse=True,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                # Elements go through _asdict_anything; the container is
                # rebuilt either as its own type or as a plain list.
                cf = v.__class__ if retain_collection_types is True else list
                items = [
                    _asdict_anything(
                        i,
                        is_key=False,
                        filter=filter,
                        dict_factory=dict_factory,
                        retain_collection_types=retain_collection_types,
                        value_serializer=value_serializer,
                    )
                    for i in v
                ]
                try:
                    rv[a.name] = cf(items)
                except TypeError:
                    if not issubclass(cf, tuple):
                        raise
                    # Workaround for TypeError: cf.__new__() missing 1 required
                    # positional argument (which appears, for a namedtuple)
                    rv[a.name] = cf(*items)
            elif isinstance(v, dict):
                df = dict_factory
                # Keys are serialized with is_key=True so collections used as
                # keys become (hashable) tuples.
                rv[a.name] = df(
                    (
                        _asdict_anything(
                            kk,
                            is_key=True,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                        _asdict_anything(
                            vv,
                            is_key=False,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                    )
                    for kk, vv in v.items()
                )
            else:
                rv[a.name] = v
        else:
            rv[a.name] = v
    return rv
|
||||||
|
|
||||||
|
|
||||||
|
def _asdict_anything(
    val,
    is_key,
    filter,
    dict_factory,
    retain_collection_types,
    value_serializer,
):
    """
    ``asdict`` only works on attrs instances, this works on anything.

    *is_key* marks values being used as dict keys: collections encountered
    there are rebuilt as tuples (hashable) rather than lists.
    """
    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
        # Attrs class.
        rv = asdict(
            val,
            recurse=True,
            filter=filter,
            dict_factory=dict_factory,
            retain_collection_types=retain_collection_types,
            value_serializer=value_serializer,
        )
    elif isinstance(val, (tuple, list, set, frozenset)):
        # Pick the container to rebuild with: original type, tuple for dict
        # keys, or a plain list otherwise.
        if retain_collection_types is True:
            cf = val.__class__
        elif is_key:
            cf = tuple
        else:
            cf = list

        rv = cf(
            [
                _asdict_anything(
                    i,
                    is_key=False,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
                for i in val
            ]
        )
    elif isinstance(val, dict):
        df = dict_factory
        rv = df(
            (
                _asdict_anything(
                    kk,
                    is_key=True,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
                _asdict_anything(
                    vv,
                    is_key=False,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
            )
            for kk, vv in val.items()
        )
    else:
        rv = val
        # Scalars still go through the serializer hook; there is no
        # owning instance/field at this point, hence the Nones.
        if value_serializer is not None:
            rv = value_serializer(None, None, rv)

    return rv
|
||||||
|
|
||||||
|
|
||||||
|
def astuple(
    inst,
    recurse=True,
    filter=None,
    tuple_factory=tuple,
    retain_collection_types=False,
):
    """
    Return the *attrs* attribute values of *inst* as a tuple.

    Optionally recurse into other *attrs*-decorated classes.

    Args:
        inst: Instance of an *attrs*-decorated class.

        recurse (bool):
            Recurse into classes that are also *attrs*-decorated.

        filter (~typing.Callable):
            A callable whose return code determines whether an attribute or
            element is included (`True`) or dropped (`False`). Is called with
            the `attrs.Attribute` as the first argument and the value as the
            second argument.

        tuple_factory (~typing.Callable):
            A callable to produce tuples from. For example, to produce lists
            instead of tuples.

        retain_collection_types (bool):
            Do not convert to `list` or `dict` when encountering an attribute
            which type is `tuple`, `dict` or `set`. Only meaningful if
            *recurse* is `True`.

    Returns:
        Return type of *tuple_factory*

    Raises:
        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 16.2.0
    """
    attrs = fields(inst.__class__)
    rv = []
    retain = retain_collection_types  # Very long. :/
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                # Nested attrs instance: serialize it with the same options.
                rv.append(
                    astuple(
                        v,
                        recurse=True,
                        filter=filter,
                        tuple_factory=tuple_factory,
                        retain_collection_types=retain,
                    )
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain is True else list
                items = [
                    (
                        astuple(
                            j,
                            recurse=True,
                            filter=filter,
                            tuple_factory=tuple_factory,
                            retain_collection_types=retain,
                        )
                        if has(j.__class__)
                        else j
                    )
                    for j in v
                ]
                try:
                    rv.append(cf(items))
                except TypeError:
                    if not issubclass(cf, tuple):
                        raise
                    # Workaround for TypeError: cf.__new__() missing 1 required
                    # positional argument (which appears, for a namedtuple)
                    rv.append(cf(*items))
            elif isinstance(v, dict):
                df = v.__class__ if retain is True else dict
                # Both keys and values may themselves be attrs instances.
                rv.append(
                    df(
                        (
                            (
                                astuple(
                                    kk,
                                    tuple_factory=tuple_factory,
                                    retain_collection_types=retain,
                                )
                                if has(kk.__class__)
                                else kk
                            ),
                            (
                                astuple(
                                    vv,
                                    tuple_factory=tuple_factory,
                                    retain_collection_types=retain,
                                )
                                if has(vv.__class__)
                                else vv
                            ),
                        )
                        for kk, vv in v.items()
                    )
                )
            else:
                rv.append(v)
        else:
            rv.append(v)

    # A list tuple_factory gets rv as-is to avoid a pointless copy.
    return rv if tuple_factory is list else tuple_factory(rv)
|
||||||
|
|
||||||
|
|
||||||
|
def has(cls):
    """
    Check whether *cls* is a class with *attrs* attributes.

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

    Returns:
        bool:
    """
    if getattr(cls, "__attrs_attrs__", None) is not None:
        return True

    # Not an attrs class directly -- it may be a parametrized generic
    # (e.g. ``A[str]``) whose origin is one.
    origin = get_generic_base(cls)
    if origin is None:
        return False

    generic_attrs = getattr(origin, "__attrs_attrs__", None)
    if generic_attrs is not None:
        # Cache on the alias so the next lookup takes the fast path.
        cls.__attrs_attrs__ = generic_attrs
    return generic_attrs is not None
|
||||||
|
|
||||||
|
|
||||||
|
def assoc(inst, **changes):
    """
    Copy *inst* and apply *changes*.

    This is different from `evolve` that applies the changes to the arguments
    that create the new instance.

    `evolve`'s behavior is preferable, but there are `edge cases`_ where it
    doesn't work. Therefore `assoc` is deprecated, but will not be removed.

    .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251

    Args:
        inst: Instance of a class with *attrs* attributes.

        changes: Keyword changes in the new copy.

    Returns:
        A copy of inst with *changes* incorporated.

    Raises:
        attrs.exceptions.AttrsAttributeNotFoundError:
            If *attr_name* couldn't be found on *cls*.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. deprecated:: 17.1.0
        Use `attrs.evolve` instead if you can. This function will not be
        removed due to the slightly different approach compared to
        `attrs.evolve`, though.
    """
    # Shallow-copy, then overwrite the requested attributes in place.
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in changes.items():
        # fields() results expose attributes by name; NOTHING marks absence.
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            msg = f"{k} is not an attrs attribute on {new.__class__}."
            raise AttrsAttributeNotFoundError(msg)
        # _OBJ_SETATTR writes the attribute directly (presumably
        # object.__setattr__, so slotted/frozen classes work) -- TODO confirm.
        _OBJ_SETATTR(new, k, v)
    return new
|
||||||
|
|
||||||
|
|
||||||
|
def evolve(*args, **changes):
    """
    Create a new instance, based on the first positional argument with
    *changes* applied.

    Args:

        inst:
            Instance of a class with *attrs* attributes. *inst* must be passed
            as a positional argument.

        changes:
            Keyword changes in the new copy.

    Returns:
        A copy of inst with *changes* incorporated.

    Raises:
        TypeError:
            If *attr_name* couldn't be found in the class ``__init__``.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 17.1.0
    .. deprecated:: 23.1.0
        It is now deprecated to pass the instance using the keyword argument
        *inst*. It will raise a warning until at least April 2024, after which
        it will become an error. Always pass the instance as a positional
        argument.
    .. versionchanged:: 24.1.0
        *inst* can't be passed as a keyword argument anymore.
    """
    # Exactly one positional argument is accepted; unpacking raises
    # ValueError for zero or several, which we re-raise as TypeError.
    try:
        (inst,) = args
    except ValueError:
        msg = (
            f"evolve() takes 1 positional argument, but {len(args)} were given"
        )
        raise TypeError(msg) from None

    cls = inst.__class__
    attrs = fields(cls)
    for a in attrs:
        # Attributes excluded from __init__ can't be passed to the
        # constructor, so they are skipped (their values are not carried over).
        if not a.init:
            continue
        attr_name = a.name  # To deal with private attributes.
        init_name = a.alias
        # Explicit *changes* win; everything else is copied from *inst*.
        if init_name not in changes:
            changes[init_name] = getattr(inst, attr_name)

    return cls(**changes)
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_types(
    cls, globalns=None, localns=None, attribs=None, include_extras=True
):
    """
    Resolve any strings and forward annotations in type annotations.

    This is only required if you need concrete types in :class:`Attribute`'s
    *type* field. In other words, you don't need to resolve your types if you
    only use them for static type checking.

    With no arguments, names will be looked up in the module in which the class
    was created. If this is not what you want, for example, if the name only
    exists inside a method, you may pass *globalns* or *localns* to specify
    other dictionaries in which to look up these names. See the docs of
    `typing.get_type_hints` for more details.

    Args:
        cls (type): Class to resolve.

        globalns (dict | None): Dictionary containing global variables.

        localns (dict | None): Dictionary containing local variables.

        attribs (list | None):
            List of attribs for the given class. This is necessary when calling
            from inside a ``field_transformer`` since *cls* is not an *attrs*
            class yet.

        include_extras (bool):
            Resolve more accurately, if possible. Pass ``include_extras`` to
            ``typing.get_hints``, if supported by the typing module. On
            supported Python versions (3.9+), this resolves the types more
            accurately.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class and you didn't pass any attribs.

        NameError: If types cannot be resolved because of missing variables.

    Returns:
        *cls* so you can use this function also as a class decorator. Please
        note that you have to apply it **after** `attrs.define`. That means the
        decorator has to come in the line **before** `attrs.define`.

    .. versionadded:: 20.1.0
    .. versionadded:: 21.1.0 *attribs*
    .. versionadded:: 23.1.0 *include_extras*
    """
    # Since calling get_type_hints is expensive we cache whether we've
    # done it already.
    if getattr(cls, "__attrs_types_resolved__", None) != cls:
        # Deferred import: typing is only needed on the slow path.
        import typing

        kwargs = {"globalns": globalns, "localns": localns}

        # include_extras only exists on get_type_hints from 3.9 onwards.
        if PY_3_9_PLUS:
            kwargs["include_extras"] = include_extras

        hints = typing.get_type_hints(cls, **kwargs)
        for field in fields(cls) if attribs is None else attribs:
            if field.name in hints:
                # Since fields have been frozen we must work around it.
                _OBJ_SETATTR(field, "type", hints[field.name])
        # We store the class we resolved so that subclasses know they haven't
        # been resolved.
        cls.__attrs_types_resolved__ = cls

    # Return the class so you can use it as a decorator too.
    return cls
|
||||||
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,631 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
"""
|
||||||
|
These are keyword-only APIs that call `attr.s` and `attr.ib` with different
|
||||||
|
default values.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
from . import setters
|
||||||
|
from ._funcs import asdict as _asdict
|
||||||
|
from ._funcs import astuple as _astuple
|
||||||
|
from ._make import (
|
||||||
|
_DEFAULT_ON_SETATTR,
|
||||||
|
NOTHING,
|
||||||
|
_frozen_setattrs,
|
||||||
|
attrib,
|
||||||
|
attrs,
|
||||||
|
)
|
||||||
|
from .exceptions import UnannotatedAttributeError
|
||||||
|
|
||||||
|
|
||||||
|
def define(
    maybe_cls=None,
    *,
    these=None,
    repr=None,
    unsafe_hash=None,
    hash=None,
    init=None,
    slots=True,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=None,
    kw_only=False,
    cache_hash=False,
    auto_exc=True,
    eq=None,
    order=False,
    auto_detect=True,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
):
    r"""
    A class decorator that adds :term:`dunder methods` according to
    :term:`fields <field>` specified using :doc:`type annotations <types>`,
    `field()` calls, or the *these* argument.

    Since *attrs* patches or replaces an existing class, you cannot use
    `object.__init_subclass__` with *attrs* classes, because it runs too early.
    As a replacement, you can define ``__attrs_init_subclass__`` on your class.
    It will be called by *attrs* classes that subclass it after they're
    created. See also :ref:`init-subclass`.

    Args:
        slots (bool):
            Create a :term:`slotted class <slotted classes>` that's more
            memory-efficient. Slotted classes are generally superior to the
            default dict classes, but have some gotchas you should know about,
            so we encourage you to read the :term:`glossary entry <slotted
            classes>`.

        auto_detect (bool):
            Instead of setting the *init*, *repr*, *eq*, and *hash* arguments
            explicitly, assume they are set to True **unless any** of the
            involved methods for one of the arguments is implemented in the
            *current* class (meaning, it is *not* inherited from some base
            class).

            So, for example by implementing ``__eq__`` on a class yourself,
            *attrs* will deduce ``eq=False`` and will create *neither*
            ``__eq__`` *nor* ``__ne__`` (but Python classes come with a
            sensible ``__ne__`` by default, so it *should* be enough to only
            implement ``__eq__`` in most cases).

            Passing True or False to *init*, *repr*, *eq*, *cmp*, or *hash*
            overrides whatever *auto_detect* would determine.

        auto_exc (bool):
            If the class subclasses `BaseException` (which implicitly includes
            any subclass of any exception), the following happens to behave
            like a well-behaved Python exception class:

            - the values for *eq*, *order*, and *hash* are ignored and the
              instances compare and hash by the instance's ids [#]_ ,
            - all attributes that are either passed into ``__init__`` or have a
              default value are additionally available as a tuple in the
              ``args`` attribute,
            - the value of *str* is ignored leaving ``__str__`` to base
              classes.

            .. [#]
                Note that *attrs* will *not* remove existing implementations of
                ``__hash__`` or the equality methods. It just won't add own
                ones.

        on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
            A callable that is run whenever the user attempts to set an
            attribute (either by assignment like ``i.x = 42`` or by using
            `setattr` like ``setattr(i, "x", 42)``). It receives the same
            arguments as validators: the instance, the attribute that is being
            modified, and the new value.

            If no exception is raised, the attribute is set to the return value
            of the callable.

            If a list of callables is passed, they're automatically wrapped in
            an `attrs.setters.pipe`.

            If left None, the default behavior is to run converters and
            validators whenever an attribute is set.

        init (bool):
            Create a ``__init__`` method that initializes the *attrs*
            attributes. Leading underscores are stripped for the argument name,
            unless an alias is set on the attribute.

            .. seealso::
                `init` shows advanced ways to customize the generated
                ``__init__`` method, including executing code before and after.

        repr (bool):
            Create a ``__repr__`` method with a human readable representation
            of *attrs* attributes.

        str (bool):
            Create a ``__str__`` method that is identical to ``__repr__``. This
            is usually not necessary except for `Exception`\ s.

        eq (bool | None):
            If True or None (default), add ``__eq__`` and ``__ne__`` methods
            that check two instances for equality.

            .. seealso::
                `comparison` describes how to customize the comparison behavior
                going as far comparing NumPy arrays.

        order (bool | None):
            If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
            methods that behave like *eq* above and allow instances to be
            ordered.

            They compare the instances as if they were tuples of their *attrs*
            attributes if and only if the types of both classes are
            *identical*.

            If `None` mirror value of *eq*.

            .. seealso:: `comparison`

        cmp (bool | None):
            Setting *cmp* is equivalent to setting *eq* and *order* to the same
            value. Must not be mixed with *eq* or *order*.

        unsafe_hash (bool | None):
            If None (default), the ``__hash__`` method is generated according
            how *eq* and *frozen* are set.

            1. If *both* are True, *attrs* will generate a ``__hash__`` for
               you.
            2. If *eq* is True and *frozen* is False, ``__hash__`` will be set
               to None, marking it unhashable (which it is).
            3. If *eq* is False, ``__hash__`` will be left untouched meaning
               the ``__hash__`` method of the base class will be used. If the
               base class is `object`, this means it will fall back to id-based
               hashing.

            Although not recommended, you can decide for yourself and force
            *attrs* to create one (for example, if the class is immutable even
            though you didn't freeze it programmatically) by passing True or
            not. Both of these cases are rather special and should be used
            carefully.

            .. seealso::

                - Our documentation on `hashing`,
                - Python's documentation on `object.__hash__`,
                - and the `GitHub issue that led to the default \ behavior
                  <https://github.com/python-attrs/attrs/issues/136>`_ for more
                  details.

        hash (bool | None):
            Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.

        cache_hash (bool):
            Ensure that the object's hash code is computed only once and stored
            on the object. If this is set to True, hashing must be either
            explicitly or implicitly enabled for this class. If the hash code
            is cached, avoid any reassignments of fields involved in hash code
            computation or mutations of the objects those fields point to after
            object creation. If such changes occur, the behavior of the
            object's hash code is undefined.

        frozen (bool):
            Make instances immutable after initialization. If someone attempts
            to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
            is raised.

            .. note::

                1. This is achieved by installing a custom ``__setattr__``
                   method on your class, so you can't implement your own.

                2. True immutability is impossible in Python.

                3. This *does* have a minor runtime performance `impact
                   <how-frozen>` when initializing new instances. In other
                   words: ``__init__`` is slightly slower with ``frozen=True``.

                4. If a class is frozen, you cannot modify ``self`` in
                   ``__attrs_post_init__`` or a self-written ``__init__``. You
                   can circumvent that limitation by using
                   ``object.__setattr__(self, "attribute_name", value)``.

                5. Subclasses of a frozen class are frozen too.

        kw_only (bool):
            Make all attributes keyword-only in the generated ``__init__`` (if
            *init* is False, this parameter is ignored).

        weakref_slot (bool):
            Make instances weak-referenceable. This has no effect unless
            *slots* is True.

        field_transformer (~typing.Callable | None):
            A function that is called with the original class object and all
            fields right before *attrs* finalizes the class. You can use this,
            for example, to automatically add converters or validators to
            fields based on their types.

            .. seealso:: `transform-fields`

        match_args (bool):
            If True (default), set ``__match_args__`` on the class to support
            :pep:`634` (*Structural Pattern Matching*). It is a tuple of all
            non-keyword-only ``__init__`` parameter names on Python 3.10 and
            later. Ignored on older Python versions.

        collect_by_mro (bool):
            If True, *attrs* collects attributes from base classes correctly
            according to the `method resolution order
            <https://docs.python.org/3/howto/mro.html>`_. If False, *attrs*
            will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.

            See also `issue #428
            <https://github.com/python-attrs/attrs/issues/428>`_.

        getstate_setstate (bool | None):
            .. note::

                This is usually only interesting for slotted classes and you
                should probably just set *auto_detect* to True.

            If True, ``__getstate__`` and ``__setstate__`` are generated and
            attached to the class. This is necessary for slotted classes to be
            pickleable. If left None, it's True by default for slotted classes
            and False for dict classes.

            If *auto_detect* is True, and *getstate_setstate* is left None, and
            **either** ``__getstate__`` or ``__setstate__`` is detected
            directly on the class (meaning: not inherited), it is set to False
            (this is usually what you want).

        auto_attribs (bool | None):
            If True, look at type annotations to determine which attributes to
            use, like `dataclasses`. If False, it will only look for explicit
            :func:`field` class attributes, like classic *attrs*.

            If left None, it will guess:

            1. If any attributes are annotated and no unannotated
               `attrs.field`\ s are found, it assumes *auto_attribs=True*.
            2. Otherwise it assumes *auto_attribs=False* and tries to collect
               `attrs.field`\ s.

            If *attrs* decides to look at type annotations, **all** fields
            **must** be annotated. If *attrs* encounters a field that is set to
            a :func:`field` / `attr.ib` but lacks a type annotation, an
            `attrs.exceptions.UnannotatedAttributeError` is raised. Use
            ``field_name: typing.Any = field(...)`` if you don't want to set a
            type.

            .. warning::

                For features that use the attribute name to create decorators
                (for example, :ref:`validators <validators>`), you still *must*
                assign :func:`field` / `attr.ib` to them. Otherwise Python will
                either not find the name or try to use the default value to
                call, for example, ``validator`` on it.

            Attributes annotated as `typing.ClassVar`, and attributes that are
            neither annotated nor set to an `field()` are **ignored**.

        these (dict[str, object]):
            A dictionary of name to the (private) return value of `field()`
            mappings. This is useful to avoid the definition of your attributes
            within the class body because you can't (for example, if you want
            to add ``__repr__`` methods to Django models) or don't want to.

            If *these* is not `None`, *attrs* will *not* search the class body
            for attributes and will *not* remove any attributes from it.

            The order is deduced from the order of the attributes inside
            *these*.

            Arguably, this is a rather obscure feature.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
    .. versionadded:: 22.2.0
       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
    .. versionchanged:: 24.1.0
       Instances are not compared as tuples of attributes anymore, but using a
       big ``and`` condition. This is faster and has more correct behavior for
       uncomparable values like `math.nan`.
    .. versionadded:: 24.1.0
       If a class has an *inherited* classmethod called
       ``__attrs_init_subclass__``, it is executed after the class is created.
    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.

    .. note::

        The main differences to the classic `attr.s` are:

        - Automatically detect whether or not *auto_attribs* should be `True`
          (c.f. *auto_attribs* parameter).
        - Converters and validators run when attributes are set by default --
          if *frozen* is `False`.
        - *slots=True*

          Usually, this has only upsides and few visible effects in everyday
          programming. But it *can* lead to some surprising behaviors, so
          please make sure to read :term:`slotted classes`.
        - *auto_exc=True*
        - *auto_detect=True*
        - *order=False*
        - Some options that were only relevant on Python 2 or were kept around
          for backwards-compatibility have been removed.

    """

    # Forwards all options (plus next-gen defaults like collect_by_mro=True)
    # to the classic `attrs` decorator with an explicit auto_attribs value.
    def do_it(cls, auto_attribs):
        return attrs(
            maybe_cls=cls,
            these=these,
            repr=repr,
            hash=hash,
            unsafe_hash=unsafe_hash,
            init=init,
            slots=slots,
            frozen=frozen,
            weakref_slot=weakref_slot,
            str=str,
            auto_attribs=auto_attribs,
            kw_only=kw_only,
            cache_hash=cache_hash,
            auto_exc=auto_exc,
            eq=eq,
            order=order,
            auto_detect=auto_detect,
            collect_by_mro=True,
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
            match_args=match_args,
        )

    def wrap(cls):
        """
        Making this a wrapper ensures this code runs during class creation.

        We also ensure that frozen-ness of classes is inherited.
        """
        nonlocal frozen, on_setattr

        had_on_setattr = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes convert & validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = _DEFAULT_ON_SETATTR

        # However, if we subclass a frozen class, we inherit the immutability
        # and disable on_setattr.
        for base_cls in cls.__bases__:
            if base_cls.__setattr__ is _frozen_setattrs:
                if had_on_setattr:
                    msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
                    raise ValueError(msg)

                on_setattr = setters.NO_OP
                break

        # Explicit auto_attribs wins; otherwise try annotation-based
        # collection first and fall back to explicit fields only.
        if auto_attribs is not None:
            return do_it(cls, auto_attribs)

        try:
            return do_it(cls, True)
        except UnannotatedAttributeError:
            return do_it(cls, False)

    # maybe_cls's type depends on the usage of the decorator. It's a class
    # if it's used as `@attrs` but `None` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap

    return wrap(maybe_cls)
|
||||||
|
|
||||||
|
|
||||||
|
# `mutable` is simply an alias for `define` (next-gen classes are mutable by
# default).
mutable = define

# `frozen` pre-binds immutability; on_setattr=None because frozen classes
# cannot run setattr hooks anyway.
frozen = partial(define, frozen=True, on_setattr=None)
|
||||||
|
|
||||||
|
|
||||||
|
def field(
    *,
    default=NOTHING,
    validator=None,
    repr=True,
    hash=None,
    init=True,
    metadata=None,
    type=None,
    converter=None,
    factory=None,
    kw_only=False,
    eq=None,
    order=None,
    on_setattr=None,
    alias=None,
):
    """
    Create a new :term:`field` / :term:`attribute` on a class.

    .. warning::

        Does **nothing** unless the class is also decorated with
        `attrs.define` (or similar)!

    Args:
        default:
            A value that is used if an *attrs*-generated ``__init__`` is used
            and no value is passed while instantiating or the attribute is
            excluded using ``init=False``.

            If the value is an instance of `attrs.Factory`, its callable will
            be used to construct a new value (useful for mutable data types
            like lists or dicts).

            If a default is not set (or set manually to `attrs.NOTHING`), a
            value *must* be supplied when instantiating; otherwise a
            `TypeError` will be raised.

            .. seealso:: `defaults`

        factory (~typing.Callable):
            Syntactic sugar for ``default=attr.Factory(factory)``.

        validator (~typing.Callable | list[~typing.Callable]):
            Callable that is called by *attrs*-generated ``__init__`` methods
            after the instance has been initialized. They receive the
            initialized instance, the :func:`~attrs.Attribute`, and the passed
            value.

            The return value is *not* inspected so the validator has to throw
            an exception itself.

            If a `list` is passed, its items are treated as validators and must
            all pass.

            Validators can be globally disabled and re-enabled using
            `attrs.validators.get_disabled` / `attrs.validators.set_disabled`.

            The validator can also be set using decorator notation as shown
            below.

            .. seealso:: :ref:`validators`

        repr (bool | ~typing.Callable):
            Include this attribute in the generated ``__repr__`` method. If
            True, include the attribute; if False, omit it. By default, the
            built-in ``repr()`` function is used. To override how the attribute
            value is formatted, pass a ``callable`` that takes a single value
            and returns a string. Note that the resulting string is used as-is,
            which means it will be used directly *instead* of calling
            ``repr()`` (the default).

        eq (bool | ~typing.Callable):
            If True (default), include this attribute in the generated
            ``__eq__`` and ``__ne__`` methods that check two instances for
            equality. To override how the attribute value is compared, pass a
            callable that takes a single value and returns the value to be
            compared.

            .. seealso:: `comparison`

        order (bool | ~typing.Callable):
            If True (default), include this attribute in the generated
            ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
            override how the attribute value is ordered, pass a callable that
            takes a single value and returns the value to be ordered.

            .. seealso:: `comparison`

        cmp (bool | ~typing.Callable):
            Setting *cmp* is equivalent to setting *eq* and *order* to the same
            value. Must not be mixed with *eq* or *order*.

            .. seealso:: `comparison`

        hash (bool | None):
            Include this attribute in the generated ``__hash__`` method. If
            None (default), mirror *eq*'s value. This is the correct behavior
            according to the Python spec. Setting this value to anything else
            than None is *discouraged*.

            .. seealso:: `hashing`

        init (bool):
            Include this attribute in the generated ``__init__`` method.

            It is possible to set this to False and set a default value. In
            that case this attribute is unconditionally initialized with the
            specified default value or factory.

            .. seealso:: `init`

        converter (typing.Callable | Converter):
            A callable that is called by *attrs*-generated ``__init__`` methods
            to convert attribute's value to the desired format.

            If a vanilla callable is passed, it is given the passed-in value as
            the only positional argument. It is possible to receive additional
            arguments by wrapping the callable in a `Converter`.

            Either way, the returned value will be used as the new value of the
            attribute. The value is converted before being passed to the
            validator, if any.

            .. seealso:: :ref:`converters`

        metadata (dict | None):
            An arbitrary mapping, to be used by third-party code.

            .. seealso:: `extending-metadata`.

        type (type):
            The type of the attribute. Nowadays, the preferred method to
            specify the type is using a variable annotation (see :pep:`526`).
            This argument is provided for backwards-compatibility and for usage
            with `make_class`. Regardless of the approach used, the type will
            be stored on ``Attribute.type``.

            Please note that *attrs* doesn't do anything with this metadata by
            itself. You can use it as part of your own code or for `static type
            checking <types>`.

        kw_only (bool):
            Make this attribute keyword-only in the generated ``__init__`` (if
            ``init`` is False, this parameter is ignored).

        on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
            Allows to overwrite the *on_setattr* setting from `attr.s`. If left
            None, the *on_setattr* value from `attr.s` is used. Set to
            `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
            attribute -- regardless of the setting in `define()`.

        alias (str | None):
            Override this attribute's parameter name in the generated
            ``__init__`` method. If left None, default to ``name`` stripped
            of leading underscores. See `private-attributes`.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.1.0
       *eq*, *order*, and *cmp* also accept a custom callable
    .. versionadded:: 22.2.0 *alias*
    .. versionadded:: 23.1.0
       The *type* parameter has been re-added; mostly for `attrs.make_class`.
       Please note that type checkers ignore this metadata.

    .. seealso::

       `attr.ib`
    """
    # Thin keyword-only facade over the classic `attrib`; all options are
    # forwarded unchanged.
    return attrib(
        default=default,
        validator=validator,
        repr=repr,
        hash=hash,
        init=init,
        metadata=metadata,
        type=type,
        converter=converter,
        factory=factory,
        kw_only=kw_only,
        eq=eq,
        order=order,
        on_setattr=on_setattr,
        alias=alias,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
    """
    Serialize *inst* like `attr.asdict`, except that collections types are
    always retained and `dict` is always used as the *dict_factory*.

    .. versionadded:: 21.3.0
    """
    # Pin the next-gen defaults and delegate to the classic implementation.
    options = {
        "inst": inst,
        "recurse": recurse,
        "filter": filter,
        "value_serializer": value_serializer,
        "retain_collection_types": True,
    }
    return _asdict(**options)
|
||||||
|
|
||||||
|
|
||||||
|
def astuple(inst, *, recurse=True, filter=None):
    """
    Serialize *inst* like `attr.astuple`, except that collections types are
    always retained and `tuple` is always used as the *tuple_factory*.

    .. versionadded:: 21.3.0
    """
    # Pin the next-gen defaults and delegate to the classic implementation.
    return _astuple(
        inst=inst,
        recurse=recurse,
        filter=filter,
        retain_collection_types=True,
    )
|
||||||
@ -0,0 +1,15 @@
|
|||||||
|
from typing import Any, ClassVar, Protocol
|
||||||
|
|
||||||
|
# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
|
||||||
|
MYPY = False
|
||||||
|
|
||||||
|
if MYPY:
|
||||||
|
# A protocol to be able to statically accept an attrs class.
|
||||||
|
class AttrsInstance_(Protocol):
|
||||||
|
__attrs_attrs__: ClassVar[Any]
|
||||||
|
|
||||||
|
else:
|
||||||
|
# For type checkers without plug-in support use an empty protocol that
|
||||||
|
# will (hopefully) be combined into a union.
|
||||||
|
class AttrsInstance_(Protocol):
|
||||||
|
pass
|
||||||
@ -0,0 +1,86 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
|
||||||
|
from functools import total_ordering
|
||||||
|
|
||||||
|
from ._funcs import astuple
|
||||||
|
from ._make import attrib, attrs
|
||||||
|
|
||||||
|
|
||||||
|
@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo:
    """
    A version object that can be compared to tuple of length 1--4:

    >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
    True
    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
    True
    >>> vi = attr.VersionInfo(19, 2, 0, "final")
    >>> vi < (19, 1, 1)
    False
    >>> vi < (19,)
    False
    >>> vi == (19, 2,)
    True
    >>> vi == (19, 2, 1)
    False

    .. versionadded:: 19.2
    """

    # Calendar-versioning fields: year.minor.micro plus a release level
    # such as "final" or "dev0".
    year = attrib(type=int)
    minor = attrib(type=int)
    micro = attrib(type=int)
    releaselevel = attrib(type=str)

    @classmethod
    def _from_version_string(cls, s):
        """
        Parse *s* and return a `VersionInfo`.
        """
        v = s.split(".")
        # A bare "YY.minor.micro" string implies a final release.
        if len(v) == 3:
            v.append("final")

        return cls(
            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
        )

    def _ensure_tuple(self, other):
        """
        Ensure *other* is a tuple of a valid length.

        Returns a possibly transformed *other* and ourselves as a tuple of
        the same length as *other*.

        Raises `NotImplementedError` (caught by the comparison dunders and
        turned into `NotImplemented`) when *other* cannot be compared.
        """

        # Another VersionInfo is compared field-by-field via its tuple form.
        if self.__class__ is other.__class__:
            other = astuple(other)

        if not isinstance(other, tuple):
            raise NotImplementedError

        if not (1 <= len(other) <= 4):
            raise NotImplementedError

        # Truncate ourselves so a shorter tuple compares only its prefix.
        return astuple(self)[: len(other)], other

    def __eq__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            # Defer to the other operand's reflected comparison.
            return NotImplemented

        return us == them

    def __lt__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            # Defer to the other operand's reflected comparison.
            return NotImplemented

        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
        # have to do anything special with releaselevel for now.
        return us < them
|
||||||
@ -0,0 +1,9 @@
|
|||||||
|
# Type stub for attr.VersionInfo: all fields are exposed as read-only
# properties (the runtime class is a frozen attrs class).
class VersionInfo:
    @property
    def year(self) -> int: ...
    @property
    def minor(self) -> int: ...
    @property
    def micro(self) -> int: ...
    @property
    def releaselevel(self) -> str: ...
|
||||||
@ -0,0 +1,151 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
"""
|
||||||
|
Commonly useful converters.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
import typing
|
||||||
|
|
||||||
|
from ._compat import _AnnotationExtractor
|
||||||
|
from ._make import NOTHING, Factory, pipe
|
||||||
|
|
||||||
|
|
||||||
|
# Public API of this module; `pipe` is defined in attr._make and re-exported.
__all__ = [
    "default_if_none",
    "optional",
    "pipe",
    "to_bool",
]
|
||||||
|
|
||||||
|
|
||||||
|
def optional(converter):
    """
    A converter that allows an attribute to be optional. An optional attribute
    is one which can be set to `None`.

    Type annotations will be inferred from the wrapped converter's, if it has
    any.

    Args:
        converter (typing.Callable):
            the converter that is used for non-`None` values.

    .. versionadded:: 17.1.0
    """

    def optional_converter(val):
        # `None` passes through untouched; everything else is converted.
        return None if val is None else converter(val)

    # Mirror the wrapped converter's annotations, widened to Optional.
    extractor = _AnnotationExtractor(converter)
    annotations = optional_converter.__annotations__

    param_type = extractor.get_first_param_type()
    if param_type:
        annotations["val"] = typing.Optional[param_type]

    return_type = extractor.get_return_type()
    if return_type:
        annotations["return"] = typing.Optional[return_type]

    return optional_converter
|
||||||
|
|
||||||
|
|
||||||
|
def default_if_none(default=NOTHING, factory=None):
    """
    A converter that allows to replace `None` values by *default* or the result
    of *factory*.

    Args:
        default:
            Value to be used if `None` is passed. Passing an instance of
            `attrs.Factory` is supported, however the ``takes_self`` option is
            *not*.

        factory (typing.Callable):
            A callable that takes no parameters whose result is used if `None`
            is passed.

    Raises:
        TypeError: If **neither** *default* or *factory* is passed.

        TypeError: If **both** *default* and *factory* are passed.

        ValueError:
            If an instance of `attrs.Factory` is passed with
            ``takes_self=True``.

    .. versionadded:: 18.2.0
    """
    has_default = default is not NOTHING
    has_factory = factory is not None

    # Exactly one of *default* / *factory* must be given.
    if not has_default and not has_factory:
        raise TypeError("Must pass either `default` or `factory`.")

    if has_default and has_factory:
        raise TypeError("Must pass either `default` or `factory` but not both.")

    # Normalize *factory* into a Factory so both paths share one code shape.
    if has_factory:
        default = Factory(factory)

    if isinstance(default, Factory):
        if default.takes_self:
            raise ValueError("`takes_self` is not supported by default_if_none.")

        def default_if_none_converter(val):
            # Factory path: build a fresh replacement for each `None`.
            return val if val is not None else default.factory()

    else:

        def default_if_none_converter(val):
            # Plain-value path: substitute the fixed default for `None`.
            return val if val is not None else default

    return default_if_none_converter
|
||||||
|
|
||||||
|
|
||||||
|
def to_bool(val):
    """
    Convert "boolean" strings (for example, from environment variables) to real
    booleans.

    Values mapping to `True`:

    - ``True``
    - ``"true"`` / ``"t"``
    - ``"yes"`` / ``"y"``
    - ``"on"``
    - ``"1"``
    - ``1``

    Values mapping to `False`:

    - ``False``
    - ``"false"`` / ``"f"``
    - ``"no"`` / ``"n"``
    - ``"off"``
    - ``"0"``
    - ``0``

    Raises:
        ValueError: For any other value.

    .. versionadded:: 21.3.0
    """
    # Strings are compared case-insensitively.
    lowered = val.lower() if isinstance(val, str) else val

    # Tuple membership (not a set) so unhashable inputs fall through to the
    # ValueError below instead of raising TypeError.
    if lowered in (True, "true", "t", "yes", "y", "on", "1", 1):
        return True
    if lowered in (False, "false", "f", "no", "n", "off", "0", 0):
        return False

    msg = f"Cannot convert value to bool: {lowered!r}"
    raise ValueError(msg)
|
||||||
@ -0,0 +1,13 @@
|
|||||||
|
from typing import Callable, TypeVar, overload
|
||||||
|
|
||||||
|
from attrs import _ConverterType
|
||||||
|
|
||||||
|
_T = TypeVar("_T")
|
||||||
|
|
||||||
|
def pipe(*validators: _ConverterType) -> _ConverterType: ...
|
||||||
|
def optional(converter: _ConverterType) -> _ConverterType: ...
|
||||||
|
@overload
|
||||||
|
def default_if_none(default: _T) -> _ConverterType: ...
|
||||||
|
@overload
|
||||||
|
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
|
||||||
|
def to_bool(val: str) -> bool: ...
|
||||||
@ -0,0 +1,95 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import ClassVar
|
||||||
|
|
||||||
|
|
||||||
|
class FrozenError(AttributeError):
    """
    A frozen/immutable instance or attribute have been attempted to be
    modified.

    It mirrors the behavior of ``namedtuples`` by using the same error message
    and subclassing `AttributeError`.

    .. versionadded:: 20.1.0
    """

    # Shared, namedtuple-compatible error message.
    msg = "can't set attribute"
    # Fix: the annotation promises a tuple (and BaseException.args is always a
    # tuple), but the original assigned a list ``[msg]`` here.
    args: ClassVar[tuple[str]] = (msg,)
|
||||||
|
|
||||||
|
|
||||||
|
class FrozenInstanceError(FrozenError):
    """
    Raised when code attempts to modify a frozen instance.

    .. versionadded:: 16.1.0
    """
|
||||||
|
|
||||||
|
|
||||||
|
class FrozenAttributeError(FrozenError):
    """
    Raised when code attempts to modify a frozen attribute.

    .. versionadded:: 20.1.0
    """
|
||||||
|
|
||||||
|
|
||||||
|
class AttrsAttributeNotFoundError(ValueError):
    """
    Raised when an *attrs* function couldn't find an attribute that the user
    asked for.

    .. versionadded:: 16.2.0
    """
|
||||||
|
|
||||||
|
|
||||||
|
class NotAnAttrsClassError(ValueError):
    """
    Raised when a non-*attrs* class has been passed into an *attrs* function.

    .. versionadded:: 16.2.0
    """
|
||||||
|
|
||||||
|
|
||||||
|
class DefaultAlreadySetError(RuntimeError):
    """
    Raised when a default was set when defining the field and is then
    attempted to be reset using the decorator.

    .. versionadded:: 17.1.0
    """
|
||||||
|
|
||||||
|
|
||||||
|
class UnannotatedAttributeError(RuntimeError):
    """
    Raised when a class with ``auto_attribs=True`` has a field without a type
    annotation.

    .. versionadded:: 17.3.0
    """
|
||||||
|
|
||||||
|
|
||||||
|
class PythonTooOldError(RuntimeError):
    """
    Raised when an *attrs* feature requiring a newer Python version was used
    on an older one.

    .. versionadded:: 18.2.0
    """
|
||||||
|
|
||||||
|
|
||||||
|
class NotCallableError(TypeError):
    """
    A field requiring a callable has been set with a value that is not
    callable.

    .. versionadded:: 19.2.0
    """

    def __init__(self, msg, value):
        # Two-arg super deliberately starts the MRO lookup *after* TypeError,
        # so Exception.__init__ sets args=(msg, value).
        super(TypeError, self).__init__(msg, value)
        # Kept as attributes for programmatic access by callers.
        self.msg = msg
        self.value = value

    def __str__(self):
        # Show only the message, not the (msg, value) args tuple.
        return str(self.msg)
|
||||||
@ -0,0 +1,17 @@
|
|||||||
|
from typing import Any
|
||||||
|
|
||||||
|
class FrozenError(AttributeError):
|
||||||
|
msg: str = ...
|
||||||
|
|
||||||
|
class FrozenInstanceError(FrozenError): ...
|
||||||
|
class FrozenAttributeError(FrozenError): ...
|
||||||
|
class AttrsAttributeNotFoundError(ValueError): ...
|
||||||
|
class NotAnAttrsClassError(ValueError): ...
|
||||||
|
class DefaultAlreadySetError(RuntimeError): ...
|
||||||
|
class UnannotatedAttributeError(RuntimeError): ...
|
||||||
|
class PythonTooOldError(RuntimeError): ...
|
||||||
|
|
||||||
|
class NotCallableError(TypeError):
|
||||||
|
msg: str = ...
|
||||||
|
value: Any = ...
|
||||||
|
def __init__(self, msg: str, value: Any) -> None: ...
|
||||||
@ -0,0 +1,72 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
"""
|
||||||
|
Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from ._make import Attribute
|
||||||
|
|
||||||
|
|
||||||
|
def _split_what(what):
    """
    Returns a tuple of `frozenset`s of classes and attributes.
    """
    classes = frozenset(item for item in what if isinstance(item, type))
    names = frozenset(item for item in what if isinstance(item, str))
    attributes = frozenset(
        item for item in what if isinstance(item, Attribute)
    )
    return classes, names, attributes
|
||||||
|
|
||||||
|
|
||||||
|
def include(*what):
    """
    Create a filter that only allows *what*.

    Args:
        what (list[type, str, attrs.Attribute]):
            What to include. Can be a type, a name, or an attribute.

    Returns:
        Callable:
            A callable that can be passed to `attrs.asdict`'s and
            `attrs.astuple`'s *filter* argument.

    .. versionchanged:: 23.1.0 Accept strings with field names.
    """
    classes, names, attributes = _split_what(what)

    def include_(attribute, value):
        # A field passes if it matches by value type, field name, or
        # Attribute identity.
        if value.__class__ in classes:
            return True
        if attribute.name in names:
            return True
        return attribute in attributes

    return include_
|
||||||
|
|
||||||
|
|
||||||
|
def exclude(*what):
    """
    Create a filter that does **not** allow *what*.

    Args:
        what (list[type, str, attrs.Attribute]):
            What to exclude. Can be a type, a name, or an attribute.

    Returns:
        Callable:
            A callable that can be passed to `attrs.asdict`'s and
            `attrs.astuple`'s *filter* argument.

    .. versionchanged:: 23.3.0 Accept field name string as input argument
    """
    classes, names, attributes = _split_what(what)

    def exclude_(attribute, value):
        # A field is dropped if it matches by value type, field name, or
        # Attribute identity; everything else passes.
        if value.__class__ in classes:
            return False
        if attribute.name in names:
            return False
        return attribute not in attributes

    return exclude_
|
||||||
@ -0,0 +1,6 @@
|
|||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from . import Attribute, _FilterType
|
||||||
|
|
||||||
|
def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
|
||||||
|
def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
|
||||||
@ -0,0 +1,79 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
"""
|
||||||
|
Commonly used hooks for on_setattr.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from . import _config
|
||||||
|
from .exceptions import FrozenAttributeError
|
||||||
|
|
||||||
|
|
||||||
|
def pipe(*setters):
    """
    Run all *setters* and return the return value of the last one.

    .. versionadded:: 20.1.0
    """

    def wrapped_pipe(instance, attrib, new_value):
        # Each setter receives the previous setter's result.
        result = new_value
        for setter in setters:
            result = setter(instance, attrib, result)
        return result

    return wrapped_pipe
|
||||||
|
|
||||||
|
|
||||||
|
def frozen(_, __, ___):
    """
    Prevent an attribute to be modified.

    Raises:
        FrozenAttributeError: Always; all three arguments are ignored.

    .. versionadded:: 20.1.0
    """
    # Unconditional: this hook exists only to veto the assignment.
    raise FrozenAttributeError()
|
||||||
|
|
||||||
|
|
||||||
|
def validate(instance, attrib, new_value):
    """
    Run *attrib*'s validator on *new_value* if it has one.

    .. versionadded:: 20.1.0
    """
    # Honor the global switch that disables validators.
    if _config._run_validators is False:
        return new_value

    validator = attrib.validator
    if validator:
        validator(instance, attrib, new_value)

    return new_value
|
||||||
|
|
||||||
|
|
||||||
|
def convert(instance, attrib, new_value):
    """
    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
    result.

    .. versionadded:: 20.1.0
    """
    converter = attrib.converter
    if not converter:
        return new_value

    # This can be removed once we drop 3.8 and use attrs.Converter instead.
    from ._make import Converter

    if isinstance(converter, Converter):
        return converter(new_value, instance, attrib)
    return converter(new_value)
|
||||||
|
|
||||||
|
|
||||||
|
# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
|
||||||
|
# Sphinx's autodata stopped working, so the docstring is inlined in the API
|
||||||
|
# docs.
|
||||||
|
NO_OP = object()
|
||||||
@ -0,0 +1,20 @@
|
|||||||
|
from typing import Any, NewType, NoReturn, TypeVar
|
||||||
|
|
||||||
|
from . import Attribute
|
||||||
|
from attrs import _OnSetAttrType
|
||||||
|
|
||||||
|
_T = TypeVar("_T")
|
||||||
|
|
||||||
|
def frozen(
|
||||||
|
instance: Any, attribute: Attribute[Any], new_value: Any
|
||||||
|
) -> NoReturn: ...
|
||||||
|
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
|
||||||
|
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
|
||||||
|
|
||||||
|
# convert is allowed to return Any, because they can be chained using pipe.
|
||||||
|
def convert(
|
||||||
|
instance: Any, attribute: Attribute[Any], new_value: Any
|
||||||
|
) -> Any: ...
|
||||||
|
|
||||||
|
_NoOpType = NewType("_NoOpType", object)
|
||||||
|
NO_OP: _NoOpType
|
||||||
@ -0,0 +1,711 @@
|
|||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
"""
|
||||||
|
Commonly useful validators.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
import operator
|
||||||
|
import re
|
||||||
|
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from re import Pattern
|
||||||
|
|
||||||
|
from ._config import get_run_validators, set_run_validators
|
||||||
|
from ._make import _AndValidator, and_, attrib, attrs
|
||||||
|
from .converters import default_if_none
|
||||||
|
from .exceptions import NotCallableError
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"and_",
|
||||||
|
"deep_iterable",
|
||||||
|
"deep_mapping",
|
||||||
|
"disabled",
|
||||||
|
"ge",
|
||||||
|
"get_disabled",
|
||||||
|
"gt",
|
||||||
|
"in_",
|
||||||
|
"instance_of",
|
||||||
|
"is_callable",
|
||||||
|
"le",
|
||||||
|
"lt",
|
||||||
|
"matches_re",
|
||||||
|
"max_len",
|
||||||
|
"min_len",
|
||||||
|
"not_",
|
||||||
|
"optional",
|
||||||
|
"or_",
|
||||||
|
"set_disabled",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def set_disabled(disabled):
    """
    Globally disable or enable running validators.

    By default, they are run.

    Args:
        disabled (bool): If `True`, disable running all validators.

    .. warning::

        This function is not thread-safe!

    .. versionadded:: 21.3.0
    """
    # Validators run iff they are not disabled; delegate to the _config flag.
    set_run_validators(not disabled)
|
||||||
|
|
||||||
|
|
||||||
|
def get_disabled():
    """
    Return a bool indicating whether validators are currently disabled or not.

    Returns:
        bool: `True` if validators are currently disabled.

    .. versionadded:: 21.3.0
    """
    # Inverse of the _config "run validators" flag.
    return not get_run_validators()
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
def disabled():
    """
    Context manager that disables running validators within its context.

    .. warning::

        This context manager is not thread-safe!

    .. versionadded:: 21.3.0
    """
    set_run_validators(False)
    try:
        yield
    finally:
        # Unconditionally re-enables validators on exit; it does not restore
        # a previously-disabled state.
        set_run_validators(True)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, slots=True, unsafe_hash=True)
class _InstanceOfValidator:
    # The type (or tuple of types) handed to `isinstance`.
    type = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if isinstance(value, self.type):
            return

        msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
        raise TypeError(msg, attr, self.type, value)

    def __repr__(self):
        return f"<instance_of validator for type {self.type!r}>"
|
||||||
|
|
||||||
|
|
||||||
|
def instance_of(type):
    """
    A validator that raises a `TypeError` if the initializer is called with a
    wrong type for this particular attribute (checks are performed using
    `isinstance` therefore it's also valid to pass a tuple of types).

    Args:
        type (type | tuple[type]): The type to check for.

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    Raises:
        TypeError:
            With a human readable error message, the attribute (of type
            `attrs.Attribute`), the expected type, and the value it got.
    """
    return _InstanceOfValidator(type)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, frozen=True, slots=True)
class _MatchesReValidator:
    # Compiled pattern, kept for the error message and __repr__.
    pattern = attrib()
    # Bound method of *pattern*: match, search, or fullmatch.
    match_func = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.match_func(value):
            return

        msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
        raise ValueError(msg, attr, self.pattern, value)

    def __repr__(self):
        return f"<matches_re validator for pattern {self.pattern!r}>"
|
||||||
|
|
||||||
|
|
||||||
|
def matches_re(regex, flags=0, func=None):
    r"""
    A validator that raises `ValueError` if the initializer is called with a
    string that doesn't match *regex*.

    Args:
        regex (str, re.Pattern):
            A regex string or precompiled pattern to match against

        flags (int):
            Flags that will be passed to the underlying re function (default 0)

        func (typing.Callable):
            Which underlying `re` function to call. Valid options are
            `re.fullmatch`, `re.search`, and `re.match`; the default `None`
            means `re.fullmatch`. For performance reasons, the pattern is
            always precompiled using `re.compile`.

    .. versionadded:: 19.2.0
    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
    """
    valid_funcs = (re.fullmatch, None, re.search, re.match)
    if func not in valid_funcs:
        # Sorted, human-readable list of the accepted callables.
        names = sorted(e.__name__ if e else "None" for e in set(valid_funcs))
        msg = "'func' must be one of {}.".format(", ".join(names))
        raise ValueError(msg)

    if isinstance(regex, Pattern):
        # A pre-compiled pattern already carries its flags.
        if flags:
            msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
            raise TypeError(msg)
        pattern = regex
    else:
        pattern = re.compile(regex, flags)

    # Both `None` and `re.fullmatch` fall through to the fullmatch default.
    match_func = {
        re.match: pattern.match,
        re.search: pattern.search,
    }.get(func, pattern.fullmatch)

    return _MatchesReValidator(pattern, match_func)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, slots=True, unsafe_hash=True)
class _OptionalValidator:
    # The wrapped validator applied to non-None values.
    validator = attrib()

    def __call__(self, inst, attr, value):
        # `None` always passes; everything else is delegated.
        if value is not None:
            self.validator(inst, attr, value)

    def __repr__(self):
        return f"<optional validator for {self.validator!r} or None>"
|
||||||
|
|
||||||
|
|
||||||
|
def optional(validator):
    """
    A validator that makes an attribute optional. An optional attribute is one
    which can be set to `None` in addition to satisfying the requirements of
    the sub-validator.

    Args:
        validator
            (typing.Callable | tuple[typing.Callable] | list[typing.Callable]):
            A validator (or validators) that is used for non-`None` values.

    .. versionadded:: 15.1.0
    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
    .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
    """
    # Several validators are combined with AND semantics before wrapping.
    wrapped = (
        _AndValidator(validator)
        if isinstance(validator, (list, tuple))
        else validator
    )
    return _OptionalValidator(wrapped)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, slots=True, unsafe_hash=True)
class _InValidator:
    # Hashable container actually used for the membership test.
    options = attrib()
    # The container the user originally passed, for messages/repr only.
    _original_options = attrib(hash=False)

    def __call__(self, inst, attr, value):
        try:
            found = value in self.options
        except TypeError:  # e.g. `1 in "abc"`
            found = False

        if found:
            return

        msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
        raise ValueError(msg, attr, self._original_options, value)

    def __repr__(self):
        return f"<in_ validator with options {self._original_options!r}>"
|
||||||
|
|
||||||
|
|
||||||
|
def in_(options):
    """
    A validator that raises a `ValueError` if the initializer is called with a
    value that does not belong in the *options* provided.

    The check is performed using ``value in options``, so *options* has to
    support that operation.

    To keep the validator hashable, dicts, lists, and sets are transparently
    transformed into a `tuple`.

    Args:
        options: Allowed options.

    Raises:
        ValueError:
            With a human readable error message, the attribute (of type
            `attrs.Attribute`), the expected options, and the value it got.

    .. versionadded:: 17.1.0
    .. versionchanged:: 22.1.0
        The ValueError was incomplete until now and only contained the human
        readable error message. Now it contains all the information that has
        been promised since 17.1.0.
    .. versionchanged:: 24.1.0
        *options* that are a list, dict, or a set are now transformed into a
        tuple to keep the validator hashable.
    """
    # The original container is kept for error messages; a tuple copy keeps
    # the validator itself hashable.
    hashable_options = (
        tuple(options) if isinstance(options, (list, dict, set)) else options
    )
    return _InValidator(hashable_options, options)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, slots=False, unsafe_hash=True)
class _IsCallableValidator:
    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if callable(value):
            return

        message = (
            "'{name}' must be callable "
            "(got {value!r} that is a {actual!r})."
        )
        raise NotCallableError(
            msg=message.format(
                name=attr.name, value=value, actual=value.__class__
            ),
            value=value,
        )

    def __repr__(self):
        return "<is_callable validator>"
|
||||||
|
|
||||||
|
|
||||||
|
def is_callable():
    """
    A validator that raises a `attrs.exceptions.NotCallableError` if the
    initializer is called with a value for this particular attribute that is
    not callable.

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    .. versionadded:: 19.1.0

    Raises:
        attrs.exceptions.NotCallableError:
            With a human readable error message containing the attribute
            (`attrs.Attribute`) name, and the value it got.
    """
    return _IsCallableValidator()
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, slots=True, unsafe_hash=True)
class _DeepIterable:
    # Validator applied to every member of the iterable.
    member_validator = attrib(validator=is_callable())
    # Optional validator applied to the iterable itself first.
    iterable_validator = attrib(
        default=None, validator=optional(is_callable())
    )

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.iterable_validator is not None:
            self.iterable_validator(inst, attr, value)

        check_member = self.member_validator
        for member in value:
            check_member(inst, attr, member)

    def __repr__(self):
        if self.iterable_validator is None:
            iterable_identifier = ""
        else:
            iterable_identifier = f" {self.iterable_validator!r}"
        return (
            f"<deep_iterable validator for{iterable_identifier}"
            f" iterables of {self.member_validator!r}>"
        )
|
||||||
|
|
||||||
|
|
||||||
|
def deep_iterable(member_validator, iterable_validator=None):
    """
    A validator that performs deep validation of an iterable.

    Args:
        member_validator: Validator to apply to iterable members.

        iterable_validator:
            Validator to apply to iterable itself (optional).

    Raises:
        TypeError: if any sub-validators fail

    .. versionadded:: 19.1.0
    """
    # A list/tuple of member validators is combined with AND semantics.
    if isinstance(member_validator, (list, tuple)):
        member_validator = and_(*member_validator)
    return _DeepIterable(member_validator, iterable_validator)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, slots=True, unsafe_hash=True)
class _DeepMapping:
    # Validator applied to every key of the mapping.
    key_validator = attrib(validator=is_callable())
    # Validator applied to every value of the mapping.
    value_validator = attrib(validator=is_callable())
    # Optional validator applied to the mapping itself first.
    mapping_validator = attrib(default=None, validator=optional(is_callable()))

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.mapping_validator is not None:
            self.mapping_validator(inst, attr, value)

        # Each key is validated before its associated value; the mapping is
        # indexed by key rather than iterated via .items().
        for key in value:
            self.key_validator(inst, attr, key)
            self.value_validator(inst, attr, value[key])

    def __repr__(self):
        return f"<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>"
|
||||||
|
|
||||||
|
|
||||||
|
def deep_mapping(key_validator, value_validator, mapping_validator=None):
    """
    A validator that performs deep validation of a dictionary.

    Args:
        key_validator: Validator to apply to dictionary keys.

        value_validator: Validator to apply to dictionary values.

        mapping_validator:
            Validator to apply to top-level mapping attribute (optional).

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    .. versionadded:: 19.1.0

    Raises:
        TypeError: if any sub-validators fail
    """
    return _DeepMapping(key_validator, value_validator, mapping_validator)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, frozen=True, slots=True)
class _NumberValidator:
    # The comparison boundary.
    bound = attrib()
    # Human-readable operator symbol for messages ("<", "<=", ">=", ">").
    compare_op = attrib()
    # The actual comparison callable (operator.lt/le/ge/gt).
    compare_func = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.compare_func(value, self.bound):
            return

        msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
        raise ValueError(msg)

    def __repr__(self):
        return f"<Validator for x {self.compare_op} {self.bound}>"
|
||||||
|
|
||||||
|
|
||||||
|
def lt(val):
    """
    A validator that raises `ValueError` if the initializer is called with a
    number larger or equal to *val*.

    The validator uses `operator.lt` to compare the values.

    Args:
        val: Exclusive upper bound for values.

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, "<", operator.lt)
|
||||||
|
|
||||||
|
|
||||||
|
def le(val):
    """
    A validator that raises `ValueError` if the initializer is called with a
    number greater than *val*.

    The validator uses `operator.le` to compare the values.

    Args:
        val: Inclusive upper bound for values.

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, "<=", operator.le)
|
||||||
|
|
||||||
|
|
||||||
|
def ge(val):
    """
    A validator that raises `ValueError` if the initializer is called with a
    number smaller than *val*.

    The validator uses `operator.ge` to compare the values.

    Args:
        val: Inclusive lower bound for values

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, ">=", operator.ge)
|
||||||
|
|
||||||
|
|
||||||
|
def gt(val):
    """
    A validator that raises `ValueError` if the initializer is called with a
    number smaller or equal to *val*.

    The validator uses `operator.gt` to compare the values.

    Args:
        val: Exclusive lower bound for values

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, ">", operator.gt)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, frozen=True, slots=True)
class _MaxLengthValidator:
    # Inclusive upper bound for len(value).
    max_length = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        actual = len(value)
        if actual > self.max_length:
            msg = f"Length of '{attr.name}' must be <= {self.max_length}: {actual}"
            raise ValueError(msg)

    def __repr__(self):
        return f"<max_len validator for {self.max_length}>"
|
||||||
|
|
||||||
|
|
||||||
|
def max_len(length):
    """
    A validator that raises `ValueError` if the initializer is called
    with a string or iterable that is longer than *length*.

    Args:
        length (int): Maximum length of the string or iterable

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    .. versionadded:: 21.3.0
    """
    return _MaxLengthValidator(length)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, frozen=True, slots=True)
class _MinLengthValidator:
    # Inclusive lower bound for len(value).
    min_length = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        actual = len(value)
        if actual < self.min_length:
            msg = f"Length of '{attr.name}' must be >= {self.min_length}: {actual}"
            raise ValueError(msg)

    def __repr__(self):
        return f"<min_len validator for {self.min_length}>"
|
||||||
|
|
||||||
|
|
||||||
|
def min_len(length):
    """
    A validator that raises `ValueError` if the initializer is called
    with a string or iterable that is shorter than *length*.

    Args:
        length (int): Minimum length of the string or iterable

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    .. versionadded:: 22.1.0
    """
    return _MinLengthValidator(length)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, slots=True, unsafe_hash=True)
class _SubclassOfValidator:
    # The type (or tuple of types) handed to `issubclass`.
    type = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if issubclass(value, self.type):
            return

        msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
        raise TypeError(msg, attr, self.type, value)

    def __repr__(self):
        return f"<subclass_of validator for type {self.type!r}>"
|
||||||
|
|
||||||
|
|
||||||
|
def _subclass_of(type):
    """
    A validator that raises a `TypeError` if the initializer is called with a
    wrong type for this particular attribute (checks are performed using
    `issubclass` therefore it's also valid to pass a tuple of types).

    Args:
        type (type | tuple[type, ...]): The type(s) to check for.

    Returns:
        A validator callable usable as an attribute's *validator* argument.

    Raises:
        TypeError:
            With a human readable error message, the attribute (of type
            `attrs.Attribute`), the expected type, and the value it got.
    """
    return _SubclassOfValidator(type)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, slots=True, unsafe_hash=True)
class _NotValidator:
    # The child validator whose success/failure is inverted.
    validator = attrib()
    # Error-message template; `None` is replaced by the default template via
    # the default_if_none converter. Formatted with {validator} and
    # {exc_types}.
    msg = attrib(
        converter=default_if_none(
            "not_ validator child '{validator!r}' "
            "did not raise a captured error"
        )
    )
    # Tuple of exception classes the child validator is allowed (expected)
    # to raise; anything else propagates.
    exc_types = attrib(
        validator=deep_iterable(
            member_validator=_subclass_of(Exception),
            iterable_validator=instance_of(tuple),
        ),
    )

    def __call__(self, inst, attr, value):
        try:
            self.validator(inst, attr, value)
        except self.exc_types:
            pass  # suppress error to invert validity
        else:
            # The child validator accepted the value, so the inverted
            # validator must reject it.
            raise ValueError(
                self.msg.format(
                    validator=self.validator,
                    exc_types=self.exc_types,
                ),
                attr,
                self.validator,
                value,
                self.exc_types,
            )

    def __repr__(self):
        return f"<not_ validator wrapping {self.validator!r}, capturing {self.exc_types!r}>"
|
||||||
|
|
||||||
|
|
||||||
|
def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
    """
    A validator that wraps another validator and logically inverts it.

    It raises a `ValueError` when the wrapped validator *doesn't* raise one of
    the captured exception types (`ValueError` or `TypeError` by default), and
    it suppresses the exception when the wrapped validator *does* raise one.

    Intended for composing logic from existing validators without writing
    inverted variants by hand, e.g. ``not_(in_(...))``.

    Args:
        validator: A validator to be logically inverted.

        msg (str):
            Message to raise if validator fails. Formatted with keys
            ``exc_types`` and ``validator``.

        exc_types (tuple[type, ...]):
            Exception type(s) to capture. Other types raised by child
            validators will not be intercepted and pass through.

    Raises:
        ValueError:
            With a human readable error message, the attribute (of type
            `attrs.Attribute`), the validator that failed to raise an
            exception, the value it got, and the expected exception types.

    .. versionadded:: 22.2.0
    """
    # Normalize to a tuple; a single (non-iterable) exception class is
    # wrapped into a 1-tuple via the TypeError fallback.
    try:
        captured = tuple(exc_types)
    except TypeError:
        captured = (exc_types,)
    return _NotValidator(validator, msg, captured)
|
||||||
|
|
||||||
|
|
||||||
|
@attrs(repr=False, slots=True, unsafe_hash=True)
class _OrValidator:
    # Tuple of candidate validators; the first one that succeeds wins.
    validators = attrib()

    def __call__(self, inst, attr, value):
        for candidate in self.validators:
            try:
                candidate(inst, attr, value)
            except Exception:  # noqa: BLE001, PERF203, S112
                # Any failure just means we try the next candidate.
                pass
            else:
                return

        raise ValueError(
            f"None of {self.validators!r} satisfied for value {value!r}"
        )

    def __repr__(self):
        return f"<or validator wrapping {self.validators!r}>"
|
||||||
|
|
||||||
|
|
||||||
|
def or_(*validators):
    """
    A validator that composes multiple validators into one.

    When called on a value, it runs all wrapped validators until one of them is
    satisfied.

    Args:
        validators (~collections.abc.Iterable[typing.Callable]):
            Arbitrary number of validators.

    Raises:
        ValueError:
            If no validator is satisfied. Raised with a human-readable error
            message listing all the wrapped validators and the value that
            failed all of them.

    .. versionadded:: 24.1.0
    """
    # Flatten nested ``or_`` validators so the resulting repr/error message
    # lists every leaf validator directly.
    flattened = []
    for candidate in validators:
        if isinstance(candidate, _OrValidator):
            flattened.extend(candidate.validators)
        else:
            flattened.append(candidate)

    return _OrValidator(tuple(flattened))
|
||||||
@ -0,0 +1,83 @@
|
|||||||
|
# Type stubs for ``attr.validators``; the runtime implementations live in
# attr/validators.py.  Bodies are intentionally ``...`` (PEP 484 stub file).
from typing import (
    Any,
    AnyStr,
    Callable,
    Container,
    ContextManager,
    Iterable,
    Mapping,
    Match,
    Pattern,
    TypeVar,
    overload,
)

from attrs import _ValidatorType
from attrs import _ValidatorArgType

# Generic type variables used throughout the validator signatures.
_T = TypeVar("_T")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_I = TypeVar("_I", bound=Iterable)
_K = TypeVar("_K")
_V = TypeVar("_V")
_M = TypeVar("_M", bound=Mapping)

# Global on/off switch for running validators.
def set_disabled(run: bool) -> None: ...
def get_disabled() -> bool: ...
def disabled() -> ContextManager[None]: ...

# To be more precise on instance_of use some overloads.
# If there are more than 3 items in the tuple then we fall back to Any
@overload
def instance_of(type: type[_T]) -> _ValidatorType[_T]: ...
@overload
def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ...
@overload
def instance_of(
    type: tuple[type[_T1], type[_T2]]
) -> _ValidatorType[_T1 | _T2]: ...
@overload
def instance_of(
    type: tuple[type[_T1], type[_T2], type[_T3]]
) -> _ValidatorType[_T1 | _T2 | _T3]: ...
@overload
def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ...
def optional(
    validator: (
        _ValidatorType[_T]
        | list[_ValidatorType[_T]]
        | tuple[_ValidatorType[_T]]
    ),
) -> _ValidatorType[_T | None]: ...
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
def matches_re(
    regex: Pattern[AnyStr] | AnyStr,
    flags: int = ...,
    func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ...,
) -> _ValidatorType[AnyStr]: ...
def deep_iterable(
    member_validator: _ValidatorArgType[_T],
    iterable_validator: _ValidatorType[_I] | None = ...,
) -> _ValidatorType[_I]: ...
def deep_mapping(
    key_validator: _ValidatorType[_K],
    value_validator: _ValidatorType[_V],
    mapping_validator: _ValidatorType[_M] | None = ...,
) -> _ValidatorType[_M]: ...
def is_callable() -> _ValidatorType[_T]: ...
# Comparison / length validators.
def lt(val: _T) -> _ValidatorType[_T]: ...
def le(val: _T) -> _ValidatorType[_T]: ...
def ge(val: _T) -> _ValidatorType[_T]: ...
def gt(val: _T) -> _ValidatorType[_T]: ...
def max_len(length: int) -> _ValidatorType[_T]: ...
def min_len(length: int) -> _ValidatorType[_T]: ...
# Logical combinators.
def not_(
    validator: _ValidatorType[_T],
    *,
    msg: str | None = None,
    exc_types: type[Exception] | Iterable[type[Exception]] = ...,
) -> _ValidatorType[_T]: ...
def or_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
|
||||||
@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@ -0,0 +1,36 @@
|
|||||||
|
attr/__init__.py,sha256=l8Ewh5KZE7CCY0i1iDfSCnFiUTIkBVoqsXjX9EZnIVA,2087
|
||||||
|
attr/__init__.pyi,sha256=aTVHBPX6krCGvbQvOl_UKqEzmi2HFsaIVm2WKmAiqVs,11434
|
||||||
|
attr/_cmp.py,sha256=3umHiBtgsEYtvNP_8XrQwTCdFoZIX4DEur76N-2a3X8,4123
|
||||||
|
attr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368
|
||||||
|
attr/_compat.py,sha256=n2Uk3c-ywv0PkFfGlvqR7SzDXp4NOhWmNV_ZK6YfWoM,2958
|
||||||
|
attr/_config.py,sha256=z81Vt-GeT_2taxs1XZfmHx9TWlSxjPb6eZH1LTGsS54,843
|
||||||
|
attr/_funcs.py,sha256=SGDmNlED1TM3tgO9Ap2mfRfVI24XEAcrNQs7o2eBXHQ,17386
|
||||||
|
attr/_make.py,sha256=BjENJz5eJoojJVbCoupWjXLLEZJ7VID89lisLbQUlmQ,91479
|
||||||
|
attr/_next_gen.py,sha256=dhGb96VFg4kXBkS9Zdz1A2uxVJ99q_RT1hw3kLA9-uI,24630
|
||||||
|
attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469
|
||||||
|
attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121
|
||||||
|
attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209
|
||||||
|
attr/converters.py,sha256=vNa58pZi9V6uxBzl4t1QrHbQfkT4iRFAodyXe7lcgg0,3506
|
||||||
|
attr/converters.pyi,sha256=mpDoVFO3Cpx8xYSSV0iZFl7IAHuoNBglxKfxHvLj_sY,410
|
||||||
|
attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977
|
||||||
|
attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539
|
||||||
|
attr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795
|
||||||
|
attr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208
|
||||||
|
attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
attr/setters.py,sha256=faMQeiBo_nbXYnPaQ1pq8PXeA7Zr-uNsVsPMiKCmxhc,1619
|
||||||
|
attr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584
|
||||||
|
attr/validators.py,sha256=985eTP6RHyon61YEauMJgyNy1rEOhJWiSXMJgRxPtrQ,20045
|
||||||
|
attr/validators.pyi,sha256=LjKf7AoXZfvGSfT3LRs61Qfln94konYyMUPoJJjOxK4,2502
|
||||||
|
attrs-24.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
attrs-24.2.0.dist-info/METADATA,sha256=3Jgk4lr9Y1SAqAcwOLPN_mpW0wc6VOGm-yHt1LsPIHw,11524
|
||||||
|
attrs-24.2.0.dist-info/RECORD,,
|
||||||
|
attrs-24.2.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
|
||||||
|
attrs-24.2.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109
|
||||||
|
attrs/__init__.py,sha256=5FHo-EMFOX-g4ialSK4fwOjuoHzLISJDZCwoOl02Ty8,1071
|
||||||
|
attrs/__init__.pyi,sha256=o3l92VsD9kHz8sldEtb_tllBTs3TeL-vIBMTxo2Zc_4,7703
|
||||||
|
attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76
|
||||||
|
attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76
|
||||||
|
attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73
|
||||||
|
attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73
|
||||||
|
attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76
|
||||||
@ -0,0 +1,4 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: hatchling 1.25.0
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py3-none-any
|
||||||
@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2015 Hynek Schlawack and the attrs contributors
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
@ -0,0 +1,67 @@
|
|||||||
|
# SPDX-License-Identifier: MIT

# Modern ``attrs`` namespace: re-exports the public API from the legacy
# ``attr`` package under the newer import name.
from attr import (
    NOTHING,
    Attribute,
    AttrsInstance,
    Converter,
    Factory,
    _make_getattr,
    assoc,
    cmp_using,
    define,
    evolve,
    field,
    fields,
    fields_dict,
    frozen,
    has,
    make_class,
    mutable,
    resolve_types,
    validate,
)
from attr._next_gen import asdict, astuple

from . import converters, exceptions, filters, setters, validators


__all__ = [
    "__author__",
    "__copyright__",
    "__description__",
    "__doc__",
    "__email__",
    "__license__",
    "__title__",
    "__url__",
    "__version__",
    "__version_info__",
    "asdict",
    "assoc",
    "astuple",
    "Attribute",
    "AttrsInstance",
    "cmp_using",
    "Converter",
    "converters",
    "define",
    "evolve",
    "exceptions",
    "Factory",
    "field",
    "fields_dict",
    "fields",
    "filters",
    "frozen",
    "has",
    "make_class",
    "mutable",
    "NOTHING",
    "resolve_types",
    "setters",
    "validate",
    "validators",
]

# Module-level __getattr__ (PEP 562) built by ``attr``; presumably resolves
# the dunder metadata names listed in __all__ (``__version__`` etc.) lazily
# — confirm against attr/__init__.py.
__getattr__ = _make_getattr(__name__)
|
||||||
@ -0,0 +1,252 @@
|
|||||||
|
import sys
|
||||||
|
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Callable,
|
||||||
|
Mapping,
|
||||||
|
Sequence,
|
||||||
|
overload,
|
||||||
|
TypeVar,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Because we need to type our own stuff, we have to make everything from
|
||||||
|
# attr explicitly public too.
|
||||||
|
from attr import __author__ as __author__
|
||||||
|
from attr import __copyright__ as __copyright__
|
||||||
|
from attr import __description__ as __description__
|
||||||
|
from attr import __email__ as __email__
|
||||||
|
from attr import __license__ as __license__
|
||||||
|
from attr import __title__ as __title__
|
||||||
|
from attr import __url__ as __url__
|
||||||
|
from attr import __version__ as __version__
|
||||||
|
from attr import __version_info__ as __version_info__
|
||||||
|
from attr import assoc as assoc
|
||||||
|
from attr import Attribute as Attribute
|
||||||
|
from attr import AttrsInstance as AttrsInstance
|
||||||
|
from attr import cmp_using as cmp_using
|
||||||
|
from attr import converters as converters
|
||||||
|
from attr import Converter as Converter
|
||||||
|
from attr import evolve as evolve
|
||||||
|
from attr import exceptions as exceptions
|
||||||
|
from attr import Factory as Factory
|
||||||
|
from attr import fields as fields
|
||||||
|
from attr import fields_dict as fields_dict
|
||||||
|
from attr import filters as filters
|
||||||
|
from attr import has as has
|
||||||
|
from attr import make_class as make_class
|
||||||
|
from attr import NOTHING as NOTHING
|
||||||
|
from attr import resolve_types as resolve_types
|
||||||
|
from attr import setters as setters
|
||||||
|
from attr import validate as validate
|
||||||
|
from attr import validators as validators
|
||||||
|
from attr import attrib, asdict as asdict, astuple as astuple
|
||||||
|
|
||||||
|
# ``dataclass_transform`` (PEP 681) landed in typing in 3.11; older
# interpreters get it from typing_extensions.
if sys.version_info >= (3, 11):
    from typing import dataclass_transform
else:
    from typing_extensions import dataclass_transform


_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)

# Internal type aliases shared by the stub signatures below.
_EqOrderType = bool | Callable[[Any], Any]
_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any]
_ConverterType = Callable[[Any], Any]
_ReprType = Callable[[Any], str]
_ReprArgType = bool | _ReprType
_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any]
_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType
_FieldTransformer = Callable[
    [type, list["Attribute[Any]"]], list["Attribute[Any]"]
]
# FIXME: in reality, if multiple validators are passed they must be in a list
# or tuple, but those are invariant and so would prevent subtypes of
# _ValidatorType from working when passed in a list or tuple.
_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]]
|
||||||
|
|
||||||
|
# Overloads for the ``attrs.field`` specifier.  This first form catches the
# no-default / no-validator / no-converter case and returns Any.
@overload
def field(
    *,
    default: None = ...,
    validator: None = ...,
    repr: _ReprArgType = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    converter: None = ...,
    factory: None = ...,
    kw_only: bool = ...,
    eq: bool | None = ...,
    order: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
    type: type | None = ...,
) -> Any: ...


# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def field(
    *,
    default: None = ...,
    validator: _ValidatorArgType[_T] | None = ...,
    repr: _ReprArgType = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    converter: _ConverterType | Converter[Any, _T] | None = ...,
    factory: Callable[[], _T] | None = ...,
    kw_only: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
    type: type | None = ...,
) -> _T: ...


# This form catches an explicit default argument.
@overload
def field(
    *,
    default: _T,
    validator: _ValidatorArgType[_T] | None = ...,
    repr: _ReprArgType = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    converter: _ConverterType | Converter[Any, _T] | None = ...,
    factory: Callable[[], _T] | None = ...,
    kw_only: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
    type: type | None = ...,
) -> _T: ...


# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def field(
    *,
    default: _T | None = ...,
    validator: _ValidatorArgType[_T] | None = ...,
    repr: _ReprArgType = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    converter: _ConverterType | Converter[Any, _T] | None = ...,
    factory: Callable[[], _T] | None = ...,
    kw_only: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
    type: type | None = ...,
) -> Any: ...
|
||||||
|
# ``attrs.define`` class decorator.  First overload: used bare
# (``@define``) — receives the class and returns it decorated.
@overload
@dataclass_transform(field_specifiers=(attrib, field))
def define(
    maybe_cls: _C,
    *,
    these: dict[str, Any] | None = ...,
    repr: bool = ...,
    unsafe_hash: bool | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: bool | None = ...,
    order: bool | None = ...,
    auto_detect: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
) -> _C: ...

# Second overload: called with arguments (``@define(...)``) — returns a
# class decorator.
@overload
@dataclass_transform(field_specifiers=(attrib, field))
def define(
    maybe_cls: None = ...,
    *,
    these: dict[str, Any] | None = ...,
    repr: bool = ...,
    unsafe_hash: bool | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: bool | None = ...,
    order: bool | None = ...,
    auto_detect: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
) -> Callable[[_C], _C]: ...


# ``mutable`` is an alias for ``define``.
mutable = define
|
||||||
|
|
||||||
|
# ``attrs.frozen``: same shape as ``define`` but advertised to type checkers
# as frozen-by-default via ``dataclass_transform(frozen_default=True)``.
# First overload: used bare (``@frozen``).
@overload
@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
def frozen(
    maybe_cls: _C,
    *,
    these: dict[str, Any] | None = ...,
    repr: bool = ...,
    unsafe_hash: bool | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: bool | None = ...,
    order: bool | None = ...,
    auto_detect: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
) -> _C: ...

# Second overload: called with arguments (``@frozen(...)``) — returns a
# class decorator.
@overload
@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
def frozen(
    maybe_cls: None = ...,
    *,
    these: dict[str, Any] | None = ...,
    repr: bool = ...,
    unsafe_hash: bool | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: bool | None = ...,
    order: bool | None = ...,
    auto_detect: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
) -> Callable[[_C], _C]: ...
|
||||||
@ -0,0 +1,3 @@
|
|||||||
|
# SPDX-License-Identifier: MIT

# Re-export the runtime converters under the modern ``attrs`` namespace.
from attr.converters import * # noqa: F403
|
||||||
@ -0,0 +1,3 @@
|
|||||||
|
# SPDX-License-Identifier: MIT

# Re-export the runtime exceptions under the modern ``attrs`` namespace.
from attr.exceptions import * # noqa: F403
|
||||||
@ -0,0 +1,3 @@
|
|||||||
|
# SPDX-License-Identifier: MIT

# Re-export the runtime filters under the modern ``attrs`` namespace.
from attr.filters import * # noqa: F403
|
||||||
@ -0,0 +1,3 @@
|
|||||||
|
# SPDX-License-Identifier: MIT

# Re-export the runtime setters under the modern ``attrs`` namespace.
from attr.setters import * # noqa: F403
|
||||||
@ -0,0 +1,3 @@
|
|||||||
|
# SPDX-License-Identifier: MIT

# Re-export the runtime validators under the modern ``attrs`` namespace.
from attr.validators import * # noqa: F403
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
#!/venv/bin/python3.7
# -*- coding: utf-8 -*-
# Console-script wrapper (pip-style shim): normalizes argv[0] by stripping a
# Windows-launcher ``-script.pyw``/``.exe`` suffix, then delegates to the
# package entry point.
import re
import sys
from jsonspec.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
#!/venv/bin/python3.7
# -*- coding: utf-8 -*-
# Console-script wrapper (pip-style shim): normalizes argv[0] by stripping a
# Windows-launcher ``-script.pyw``/``.exe`` suffix, then delegates to the
# package entry point.
import re
import sys
from jsonpath_rw.bin.jsonpath import entry_point
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(entry_point())
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
#!/venv/bin/python3.7
# -*- coding: utf-8 -*-
# Console-script wrapper (pip-style shim): normalizes argv[0] by stripping a
# Windows-launcher ``-script.pyw``/``.exe`` suffix, then delegates to the
# package entry point.
import re
import sys
from jsonpath_ng.bin.jsonpath import entry_point
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(entry_point())
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
#!/venv/bin/python3.7
# -*- coding: utf-8 -*-
# Console-script wrapper (pip-style shim): normalizes argv[0] by stripping a
# Windows-launcher ``-script.pyw``/``.exe`` suffix, then delegates to the
# package entry point.
import re
import sys
from jsonschema.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
#!/venv/bin/python3.7
# -*- coding: utf-8 -*-
# Console-script wrapper (pip-style shim): normalizes argv[0] by stripping a
# Windows-launcher ``-script.pyw``/``.exe`` suffix, then delegates to the
# package entry point.
import re
import sys
from mako.cmd import cmdline
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(cmdline())
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
#!/venv/bin/python3.7
# -*- coding: utf-8 -*-
# Console-script wrapper (pip-style shim): normalizes argv[0] by stripping a
# Windows-launcher ``-script.pyw``/``.exe`` suffix, then delegates to the
# package entry point.
import re
import sys
from charset_normalizer.cli.normalizer import cli_detect
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(cli_detect())
|
||||||
@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@ -0,0 +1,78 @@
|
|||||||
|
Metadata-Version: 2.4
|
||||||
|
Name: certifi
|
||||||
|
Version: 2025.4.26
|
||||||
|
Summary: Python package for providing Mozilla's CA Bundle.
|
||||||
|
Home-page: https://github.com/certifi/python-certifi
|
||||||
|
Author: Kenneth Reitz
|
||||||
|
Author-email: me@kennethreitz.com
|
||||||
|
License: MPL-2.0
|
||||||
|
Project-URL: Source, https://github.com/certifi/python-certifi
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
|
||||||
|
Classifier: Natural Language :: English
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3 :: Only
|
||||||
|
Classifier: Programming Language :: Python :: 3.6
|
||||||
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
|
Classifier: Programming Language :: Python :: 3.10
|
||||||
|
Classifier: Programming Language :: Python :: 3.11
|
||||||
|
Classifier: Programming Language :: Python :: 3.12
|
||||||
|
Classifier: Programming Language :: Python :: 3.13
|
||||||
|
Requires-Python: >=3.6
|
||||||
|
License-File: LICENSE
|
||||||
|
Dynamic: author
|
||||||
|
Dynamic: author-email
|
||||||
|
Dynamic: classifier
|
||||||
|
Dynamic: description
|
||||||
|
Dynamic: home-page
|
||||||
|
Dynamic: license
|
||||||
|
Dynamic: license-file
|
||||||
|
Dynamic: project-url
|
||||||
|
Dynamic: requires-python
|
||||||
|
Dynamic: summary
|
||||||
|
|
||||||
|
Certifi: Python SSL Certificates
|
||||||
|
================================
|
||||||
|
|
||||||
|
Certifi provides Mozilla's carefully curated collection of Root Certificates for
|
||||||
|
validating the trustworthiness of SSL certificates while verifying the identity
|
||||||
|
of TLS hosts. It has been extracted from the `Requests`_ project.
|
||||||
|
|
||||||
|
Installation
|
||||||
|
------------
|
||||||
|
|
||||||
|
``certifi`` is available on PyPI. Simply install it with ``pip``::
|
||||||
|
|
||||||
|
$ pip install certifi
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
|
||||||
|
To reference the installed certificate authority (CA) bundle, you can use the
|
||||||
|
built-in function::
|
||||||
|
|
||||||
|
>>> import certifi
|
||||||
|
|
||||||
|
>>> certifi.where()
|
||||||
|
'/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
|
||||||
|
|
||||||
|
Or from the command line::
|
||||||
|
|
||||||
|
$ python -m certifi
|
||||||
|
/usr/local/lib/python3.7/site-packages/certifi/cacert.pem
|
||||||
|
|
||||||
|
Enjoy!
|
||||||
|
|
||||||
|
.. _`Requests`: https://requests.readthedocs.io/en/master/
|
||||||
|
|
||||||
|
Addition/Removal of Certificates
|
||||||
|
--------------------------------
|
||||||
|
|
||||||
|
Certifi does not support any addition/removal or other modification of the
|
||||||
|
CA trust store content. This project is intended to provide a reliable and
|
||||||
|
highly portable root of trust to python deployments. Look to upstream projects
|
||||||
|
for methods to use alternate trust.
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue