This guide covers operational concerns for building production-focused Pyvider providers, including error handling, logging, performance optimization, testing, and security.
🤖 AI-Generated Content
This documentation was generated with AI assistance and is still being audited; some of it may be inaccurate. Learn more.
For foundational design patterns and code organization, see Best Practices.
from pyvider.common.errors import ResourceError

# Good: Actionable error with context.
# Re-raise with `from e` so the original traceback is chained (PEP 3134)
# instead of being discarded.
async def _create(self, ctx: ResourceContext, base_plan: dict) -> tuple[dict | None, None]:
    try:
        await self.api.create_resource(...)
    except PermissionError as e:
        raise ResourceError(
            f"Permission denied writing to {base_plan['filename']}. "
            f"Ensure the Terraform process has write access.",
            details={"path": base_plan['filename'], "error": str(e)},
        ) from e
    except QuotaExceededError as e:
        raise ResourceError(
            f"Quota exceeded: {e.limit} resources allowed, {e.current} in use. "
            f"Contact your administrator or upgrade your plan.",
            details={"limit": e.limit, "current": e.current},
        ) from e

# Bad: Generic, unhelpful errors
async def _create(self, ctx: ResourceContext, base_plan: dict) -> tuple[dict | None, None]:
    try:
        await self.api.create_resource(...)
    except Exception as e:
        raise Exception("Error")  # No context, not actionable
from provide.foundation.errors import resilient

@resilient()  # Auto-retry with exponential backoff
async def read(self, ctx: ResourceContext) -> State | None:
    """Read with automatic retry on transient failures."""
    return await self.api.get_resource(ctx.state.id)

# For operations that shouldn't retry:
async def _delete(self, ctx: ResourceContext) -> None:
    """Delete without retry - we want immediate feedback."""
    await self.api.delete(ctx.state.id)
async def _validate_config(self, config: FileContentConfig) -> list[str]:
    """Validate configuration before any operations.

    Returns a list of human-readable error strings (empty when valid).
    """
    errors = []

    # Reject absolute paths
    if config.filename.startswith("/"):
        errors.append("Absolute paths not allowed, use relative paths")

    # Reject parent directory traversal
    if ".." in config.filename:
        errors.append("Parent directory access (..) not allowed")

    # Enforce the content size cap
    if len(config.content) > 10 * 1024 * 1024:  # 10MB
        errors.append("Content exceeds 10MB limit")

    return errors
from provide.foundation import logger

# Good: Structured with context
logger.debug(
    "Read file content",
    filename=filename,
    content_length=len(content),
    content_hash=content_hash[:8],
    operation="read",
)

# Good: Log state transitions
logger.info(
    "Resource created successfully",
    resource_class=self.__class__.__name__,
    resource_id=result_id,
)

# Bad: String concatenation, no context
logger.debug(f"Read file (unknown) with {len(content)} bytes")
# Good: Mask sensitive data
logger.debug("Authenticated with API key", key_prefix=api_key[:4] + "****")

# Bad: Logging secrets
logger.debug(f"API key: {api_key}")  # NEVER!

# Use sensitive=True in schema
@define(frozen=True)
class ProviderConfig:
    api_key: str = field(metadata={"sensitive": True})  # Won't be logged
# Good: Batch operations
async def _create_multiple(self, resources: list) -> list:
    return await self.api.batch_create(resources)

# Bad: Loop with individual calls
async def _create_multiple(self, resources: list) -> list:
    results = []
    for resource in resources:
        result = await self.api.create(resource)  # N API calls!
        results.append(result)
    return results
from functools import lru_cache

class MyDataSource(BaseDataSource):
    # Keep the cached function a @staticmethod: @lru_cache on a regular
    # instance method keys the cache on `self`, which keeps every instance
    # alive for the lifetime of the cache (ruff B019 / memory leak).
    @staticmethod
    @lru_cache(maxsize=128)
    def _parse_config_schema(schema_str: str) -> dict:
        """Cache expensive parsing operations."""
        return json.loads(schema_str)

    async def read(self, ctx: ResourceContext) -> State:
        # Use cached result if available
        # NOTE(review): `config` is not defined in this snippet — presumably
        # it comes from ctx; confirm against the real data source.
        parsed = self._parse_config_schema(config.schema)
        return State(...)
# Good: Stream large files in fixed-size chunks.
# (Iterating the handle with `async for chunk in f` yields newline-delimited
# "lines", so a newline-free binary file would be read in one giant piece —
# use explicit read() sizes instead.)
async def _process_large_file(self, path: Path) -> str:
    """Return the SHA-256 hex digest of *path* without loading it whole."""
    hash_obj = hashlib.sha256()
    async with aio.open(path, 'rb') as f:
        while chunk := await f.read(1024 * 1024):  # 1 MiB per read
            hash_obj.update(chunk)
    return hash_obj.hexdigest()

# Bad: Load entire file
async def _process_large_file(self, path: Path) -> str:
    content = await path.read_bytes()  # Could be gigabytes!
    return hashlib.sha256(content).hexdigest()
import httpx

class MyProvider(BaseProvider):
    async def configure(self, config: ProviderConfig) -> None:
        # Good: Reusable async client with connection pooling
        self.http_client = httpx.AsyncClient(
            base_url=config.api_endpoint,
            timeout=config.timeout,
            limits=httpx.Limits(
                max_connections=100,
                max_keepalive_connections=20,
            ),
        )

    async def cleanup(self) -> None:
        """Clean up resources on shutdown."""
        await self.http_client.aclose()
@classmethod
def get_schema(cls) -> PvsSchema:
    return s_resource({
        "api_key": a_str(
            required=True,
            sensitive=True,  # Masked in logs and state files
            description="API authentication key",
        ),
        "password": a_str(
            required=True,
            sensitive=True,
            description="Database password",
        ),
    })
async def _validate_config(self, config: Config) -> list[str]:
    """Never trust user input - validate everything.

    Returns a list of human-readable error strings (empty when valid).
    """
    errors = []

    # Validate URL format
    if config.url:
        try:
            parsed = urlparse(config.url)
            if parsed.scheme not in ("http", "https"):
                errors.append("URL must use http or https scheme")
        except Exception:
            errors.append("Invalid URL format")

    # Validate file paths (only when the config carries a filename)
    if hasattr(config, 'filename'):
        if os.path.isabs(config.filename):
            errors.append("Absolute paths not allowed")
        if ".." in config.filename:
            errors.append("Parent directory traversal not allowed")

    return errors
from pyvider.resources import PrivateState

class MyResource(BaseResource):
    async def _create(self, ctx: ResourceContext, base_plan: dict) -> tuple[dict | None, bytes | None]:
        # Create resource and get credentials
        result = await self.api.create_with_credentials(...)

        # Store sensitive data in encrypted private state
        private_data = {
            "access_token": result.access_token,
            "refresh_token": result.refresh_token,
            "secret_key": result.secret_key,
        }
        encrypted_private_state = PrivateState.encrypt(private_data)

        # Return public state and encrypted private state
        public_state = {
            **base_plan,
            "id": result.id,
            "created_at": result.created_at,
            # NO sensitive data here
        }
        return public_state, encrypted_private_state