import sys
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
from awsglue import DynamicFrame


# Resolve the required job parameters: the Data Catalog database and table to read.
args = getResolvedOptions(sys.argv, ['JOB_NAME', 'db_name', 'table_name'])
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
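
# Example invocation (illustrative; the job, database, and table names are
# placeholders, not values defined by this script):
#
#   aws glue start-job-run \
#       --job-name my-salesforce-update-job \
#       --arguments '{"--db_name": "my_database", "--table_name": "my_table"}'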

# Script generated for node AWS Glue Data Catalog: read the source table as a
# Spark DataFrame, then wrap it in a DynamicFrame for the transforms below.
AWSGlueDataCatalog_df = glueContext.create_data_frame.from_catalog(
    database=args['db_name'], table_name=args['table_name']
)
AWSGlueDataCatalog_dyf = DynamicFrame.fromDF(AWSGlueDataCatalog_df, glueContext, "AWSGlueDataCatalog_dyf")
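
# Optional debugging aid (commented out so the job's behavior is unchanged):
# preview a few source rows in the driver's CloudWatch logs.
#
#   AWSGlueDataCatalog_df.show(5)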

# Script generated for node Select Fields: keep only the record Id and the
# custom field being updated in Salesforce.
SelectFields_dyf = SelectFields.apply(
    frame=AWSGlueDataCatalog_dyf,
    paths=["Id", "UpsellOpportunity__c"],
    transformation_ctx="SelectFields_dyf",
)
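
# Optional hardening (an assumption, not part of the generated script):
# a Salesforce UPDATE needs a record Id, so rows with a null Id could be
# dropped first. A minimal sketch with the built-in Filter transform,
# using a hypothetical transformation_ctx name:
#
#   SelectFields_dyf = Filter.apply(
#       frame=SelectFields_dyf,
#       f=lambda row: row["Id"] is not None,
#       transformation_ctx="DropNullIds_dyf",
#   )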

# Script generated for node Salesforce: update existing Account records over
# the preconfigured Glue connection, matching rows on the Id field.
Salesforce_node = glueContext.write_dynamic_frame.from_options(
    frame=SelectFields_dyf,
    connection_type="salesforce",
    connection_options={
        "apiVersion": "v60.0",
        "connectionName": "Salesforce_Connection",
        "entityName": "Account",
        "writeOperation": "UPDATE",
        "idFieldNames": "Id",
    },
    transformation_ctx="Salesforce_node",
)


job.commit()