**Dataset schema** (one record per code-completion point):

| Column | Type | Range / values |
|---|---|---|
| idx | int64 | 0 - 7.85k |
| idx_lca | int64 | 0 - 223 |
| offset | int64 | 162 - 55k |
| repo | string | 62 distinct values |
| commit_hash | string | 113 distinct values |
| target_file | string | 134 distinct values |
| line_type_lca | string | 7 distinct values |
| ground_truth | string | lengths 1 - 46 |
| in_completions | bool | 1 distinct value |
| completion_type | string | 6 distinct values |
| non_dunder_count_intellij | int64 | 0 - 529 |
| non_dunder_count_jedi | int64 | 0 - 128 |
| start_with_ | bool | 2 distinct values |
| first_occurrence | bool | 2 distinct values |
| intellij_completions | list | lengths 1 - 532 |
| jedi_completions | list | lengths 3 - 148 |
| prefix | string | lengths 162 - 55k |
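
The derived columns can be recomputed from the raw completion lists. A minimal sketch in plain Python (the row literal is abbreviated from row 243 below; the helper name `non_dunder_count` and the reading of the `start_with_` flag are mine, not part of the dataset):

```python
# Recompute the derived columns of a row from its raw completion lists.
# Row values abbreviated from row 243 below; helper names are illustrative only.
row = {
    "ground_truth": "run",
    "intellij_completions": ["run", "parse_args", "build", "get_logger",
                             "__init__", "__repr__"],
    "jedi_completions": [
        {"name": "build", "type": "function"},
        {"name": "get_logger", "type": "function"},
        {"name": "parse_args", "type": "function"},
        {"name": "run", "type": "function"},
        {"name": "__init__", "type": "function"},
    ],
}

def non_dunder_count(names):
    """Count proposals that are not dunder attributes such as __init__."""
    return sum(1 for n in names if not n.startswith("__"))

in_completions = row["ground_truth"] in row["intellij_completions"]        # True
count_intellij = non_dunder_count(row["intellij_completions"])             # 4
count_jedi = non_dunder_count(d["name"] for d in row["jedi_completions"])  # 4
start_with_ = row["ground_truth"].startswith("_")                          # False (my reading of the flag)
print(in_completions, count_intellij, count_jedi, start_with_)
```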

---

**Row 243**

| Field | Value |
|---|---|
| idx | 243 |
| idx_lca | 13 |
| offset | 5,601 |
| repo | googlecloudplatform__dataproc-templates |
| commit_hash | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file | python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca | Unknown |
| ground_truth | run |
| in_completions | true |
| completion_type | function |
| non_dunder_count_intellij | 4 |
| non_dunder_count_jedi | 4 |
| start_with_ | false |
| first_occurrence | false |

**intellij_completions**:

```json
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
]
```

**jedi_completions**:

```json
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
]
```

**prefix** (file contents up to the completion point):

```python
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.
```
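
The offset column shares the 162-55k range with the prefix lengths, which suggests offset is simply the cursor position, i.e. len(prefix), inside target_file at commit_hash. A small sketch under that assumption (the helper is hypothetical, not dataset tooling):

```python
# Assumed relationship: offset == len(prefix), i.e. the cursor position in the
# file; ground_truth is the identifier typed right after the cursor.
import re

def completion_point(file_text: str, offset: int):
    prefix = file_text[:offset]                      # everything before the cursor
    m = re.match(r"[A-Za-z_]\w*", file_text[offset:])
    ground_truth = m.group(0) if m else ""           # next identifier, if any
    return prefix, ground_truth

# For row 243 the prefix ends in "jdbc_to_gcs_template." and ground_truth is
# "run", consistent with jdbc_to_gcs_template.run(...) seen in later rows.
```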

---

**Row 244**

| Field | Value |
|---|---|
| idx | 244 |
| idx_lca | 13 |
| offset | 5,711 |
| repo | googlecloudplatform__dataproc-templates |
| commit_hash | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file | python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca | Unknown |
| ground_truth | FORMAT_JDBC |
| in_completions | true |
| completion_type | statement |
| non_dunder_count_intellij | 86 |
| non_dunder_count_jedi | 86 |
| start_with_ | false |
| first_occurrence | false |

**intellij_completions**:

```json
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
]
```

**jedi_completions**:

```json
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
```

**prefix** (file contents up to the completion point):

```python
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.
```
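
The prefixes lean on unittest.mock's chained-call behaviour: calling a MagicMock attribute always returns the same child mock, so a chain like read.format().option() can be asserted link by link. A tiny self-contained illustration (not taken from the dataset):

```python
# MagicMock returns the same child mock for every call, so chained calls
# such as read.format().option() can be re-created and asserted step by step.
from unittest import mock

spark = mock.MagicMock()
spark.read.format("jdbc").option("url", "url")

spark.read.format.assert_called_with("jdbc")
# Re-calling format() yields the same child mock whose .option was recorded:
spark.read.format().option.assert_called_with("url", "url")
```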

---

**Row 245**

| Field | Value |
|---|---|
| idx | 245 |
| idx_lca | 13 |
| offset | 5,801 |
| repo | googlecloudplatform__dataproc-templates |
| commit_hash | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file | python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca | Unknown |
| ground_truth | JDBC_URL |
| in_completions | true |
| completion_type | statement |
| non_dunder_count_intellij | 86 |
| non_dunder_count_jedi | 86 |
| start_with_ | false |
| first_occurrence | false |

**intellij_completions**:

```json
	[
  "FORMAT_JDBC",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_URL",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
]
```

**jedi_completions**:

```json
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
```

**prefix** (file contents up to the completion point):

```python
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.
```

---

**Row 246**

| Field | Value |
|---|---|
| idx | 246 |
| idx_lca | 13 |
| offset | 5,904 |
| repo | googlecloudplatform__dataproc-templates |
| commit_hash | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file | python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca | Unknown |
| ground_truth | JDBC_DRIVER |
| in_completions | true |
| completion_type | statement |
| non_dunder_count_intellij | 86 |
| non_dunder_count_jedi | 86 |
| start_with_ | false |
| first_occurrence | false |

**intellij_completions**:

```json
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "JDBC_DRIVER",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
]
```

**jedi_completions**:

```json
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
```

**prefix** (file contents up to the completion point):

```python
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.
```

---

**Row 247**

| Field | Value |
|---|---|
| idx | 247 |
| idx_lca | 13 |
| offset | 6,022 |
| repo | googlecloudplatform__dataproc-templates |
| commit_hash | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file | python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca | Unknown |
| ground_truth | JDBC_TABLE |
| in_completions | true |
| completion_type | statement |
| non_dunder_count_intellij | 86 |
| non_dunder_count_jedi | 86 |
| start_with_ | false |
| first_occurrence | false |

**intellij_completions**:

```json
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_TABLE",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
]
```

**jedi_completions**:

```json
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants. | 
| 248 | 13 | 6,148 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_PARTITIONCOLUMN | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants. | 
| 249 | 13 | 6,293 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_LOWERBOUND | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants. | 
| 250 | 13 | 6,437 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_UPPERBOUND | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants. | 
| 251 | 13 | 6,590 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_NUMPARTITIONS | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_LOWERBOUND",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants. | 
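For orientation, the read path asserted in the row above (whose completion point is JDBC_NUMPARTITIONS) can be reconstructed as below. This is a sketch under the assumption that the template's constants resolve to Spark's standard JDBC option keys (url, driver, dbtable, partitionColumn, lowerBound, upperBound, numPartitions); the variable names are illustrative.

input_data = (
    spark.read.format("jdbc")
    .option("url", args["jdbctogcs.input.url"])
    .option("driver", args["jdbctogcs.input.driver"])
    .option("dbtable", args["jdbctogcs.input.table"])
    .option("partitionColumn", args["jdbctogcs.input.partitioncolumn"])
    .option("lowerBound", args["jdbctogcs.input.lowerbound"])
    .option("upperBound", args["jdbctogcs.input.upperbound"])
    .option("numPartitions", args["jdbctogcs.numpartitions"])
    .load()
)

The seven option() hops here are why the tests' setup and assertion chains are exactly seven calls deep before load().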
| 252 | 13 | 6,818 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | OUTPUT_MODE_APPEND | true | statement | 86 | 86 | false | true |
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants. | 
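The write-side assertions across these rows (mode().parquet for parquet, mode().format().save for avro) imply a dispatch on the output format inside the template's run(). A hedged sketch of that branch follows; the function and variable names are invented here for illustration, only the parquet and avro paths are directly evidenced by the assertions, and the csv and json branches are assumptions:

def write_output(df, out_format, out_mode, out_location):
    writer = df.write.mode(out_mode)  # e.g. "append" or "overwrite"
    if out_format == "parquet":
        writer.parquet(out_location)               # asserted via mode().parquet
    elif out_format == "avro":
        writer.format("avro").save(out_location)   # asserted via mode().format().save
    elif out_format == "csv":
        writer.option("header", True).csv(out_location)  # assumption
    else:
        writer.json(out_location)                  # assumption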
| 253 | 13 | 6,939 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | FORMAT_AVRO | true | statement | 86 | 86 | false | true |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants. | 
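The setup line that assigns load.return_value in each test deserves a note: it routes the end of the read chain to a sentinel child mock so that write-side calls can be asserted on a known object. A runnable distillation (the sentinel and option-key names are arbitrary):

from unittest import mock

spark = mock.MagicMock()
sentinel_df = spark.dataframe.DataFrame  # any distinct child mock will do

# Mirror the tests' setup line: seven no-arg option() hops, then stub load:
spark.read.format().option().option().option().option() \
    .option().option().option().load.return_value = sentinel_df

# Whatever the code under test loads is now the sentinel:
df = (spark.read.format("jdbc")
      .option("k1", "v1").option("k2", "v2").option("k3", "v3")
      .option("k4", "v4").option("k5", "v5").option("k6", "v6")
      .option("k7", "v7").load())
assert df is sentinel_df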
| 254 | 13 | 7,328 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | parse_args | true | function | 4 | 4 | false | false |
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template. | 
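The row above truncates at the parse_args call itself, so it is worth pinning down the contract the earlier assertions establish: every --jdbctogcs.* flag round-trips into a dict under the same dotted key, with all values kept as strings. A minimal sketch of that contract (the real template builds its parser from template_constants; the two flags below are merely examples):

import argparse

def parse_args(argv):
    parser = argparse.ArgumentParser()
    for key in ("jdbctogcs.input.url", "jdbctogcs.output.mode"):
        parser.add_argument(f"--{key}")  # argparse keeps the dotted key as dest
    known_args, _ = parser.parse_known_args(argv)
    return vars(known_args)

assert parse_args(["--jdbctogcs.input.url=url"])["jdbctogcs.input.url"] == "url"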
| 255 | 13 | 8,026 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | run | true | function | 4 | 4 | false | false |
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template. | 
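The chained assertions in the prefix above work because of how MagicMock resolves calls: invoking a mock attribute returns the same return_value child no matter the arguments, so read.format().option() always names one fixed node in the mock tree, and assigning to load.return_value is what routes the fake DataFrame through run(). A minimal sketch of that property, using only stdlib unittest.mock (the spark/df names are illustrative, not taken from the record):

from unittest.mock import MagicMock

spark = MagicMock()
df = spark.read.format("jdbc").option("url", "url").load()

# Calls with any arguments resolve to the same child mock ...
assert spark.read.format() is spark.read.format("jdbc")
# ... which is why assertions can be chained through bare () hops.
spark.read.format.assert_called_with("jdbc")
spark.read.format().option.assert_called_with("url", "url")
assert df is spark.read.format().option().load()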
| idx: 256 | idx_lca: 13 | offset: 8136 |
| repo: googlecloudplatform__dataproc-templates |
| commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file: python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca: Unknown |
| ground_truth: FORMAT_JDBC | in_completions: true |
| completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants. | 
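Taken in order, the assertions in this record's prefix pin down the reader that JDBCToGCSTemplate.run builds before writing: format(FORMAT_JDBC), seven option() calls, then load(). A hedged reconstruction inferred from that asserted call order, not copied from the repo; the function name and its parameters are assumptions, while the args keys come straight from the parse_args test:

def read_jdbc(spark, constants, args):
    # Mirrors the asserted chain: jdbc source, seven options, then load.
    return (
        spark.read.format(constants.FORMAT_JDBC)
        .option(constants.JDBC_URL, args["jdbctogcs.input.url"])
        .option(constants.JDBC_DRIVER, args["jdbctogcs.input.driver"])
        .option(constants.JDBC_TABLE, args["jdbctogcs.input.table"])
        .option(constants.JDBC_PARTITIONCOLUMN, args["jdbctogcs.input.partitioncolumn"])
        .option(constants.JDBC_LOWERBOUND, args["jdbctogcs.input.lowerbound"])
        .option(constants.JDBC_UPPERBOUND, args["jdbctogcs.input.upperbound"])
        .option(constants.JDBC_NUMPARTITIONS, args["jdbctogcs.numpartitions"])
        .load()
    )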
| idx: 257 | idx_lca: 13 | offset: 8226 |
| repo: googlecloudplatform__dataproc-templates |
| commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file: python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca: Unknown |
| ground_truth: JDBC_URL | in_completions: true |
| completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_URL",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants. | 
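Between them, the three run tests in the prefix fix the writer's dispatch: overwrite+parquet asserts write.mode().parquet(location), append+avro asserts write.mode().format(FORMAT_AVRO).save(location), and the csv/ignore case is truncated in this excerpt before its write assertions. A sketch of that dispatch under those observations; the literal string comparisons and the csv branch are assumptions:

def write_gcs(input_data, constants, output_format, output_mode, output_location):
    writer = input_data.write.mode(output_mode)
    if output_format == "parquet":
        writer.parquet(output_location)  # asserted in test_run_pass_args2
    elif output_format == "avro":
        writer.format(constants.FORMAT_AVRO).save(output_location)  # test_run_pass_args3
    elif output_format == "csv":
        writer.csv(output_location)  # assumed; the excerpt cuts off before this assert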
| idx: 258 | idx_lca: 13 | offset: 8329 |
| repo: googlecloudplatform__dataproc-templates |
| commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file: python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca: random |
| ground_truth: JDBC_DRIVER | in_completions: true |
| completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "JDBC_DRIVER",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants. | 
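Note on the assertion style used throughout these prefixes: with unittest.mock, a call such as mock_spark_session.read.format(...) returns the same child mock regardless of its arguments, so chaining .option() in an assertion navigates back to the exact mock object the template exercised, and assert_called_with then checks the most recent call on it. A minimal, self-contained sketch of the idiom (names here are illustrative, not taken from the dataset):

from unittest import mock

m = mock.MagicMock()
m.read.format("jdbc").option("url", "u")               # code under test exercises the chain
m.read.format().option.assert_called_with("url", "u")  # format() returns the same child mock, so this passes

By that pattern, the truncated "constants." ending this prefix lines up with the JDBC_DRIVER assertion written out in the parallel tests above, though this row's ground-truth field is not visible in this excerpt.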
| 259 | 13 | 8,447 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_TABLE | true | statement | 86 | 86 | false | false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_TABLE",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants. | 
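For readability, a sketch of this row's cut-off assertion completed with the ground_truth recorded above (JDBC_TABLE); the "table1" argument is assumed from the parallel assertions earlier in the same prefix, not from the row's metadata:

# hypothetical completion of the truncated line at the end of this prefix
mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")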
| 260 | 13 | 8,573 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_PARTITIONCOLUMN | true | statement | 86 | 86 | false | false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants. | 
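As with the previous row, a sketch of the truncated assertion completed with this row's ground_truth (JDBC_PARTITIONCOLUMN); the "column" argument follows the parallel assertions in the same prefix and is assumed, not recorded in the row:

# hypothetical completion of the truncated line at the end of this prefix
mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")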
| 261 | 13 | 8,718 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_LOWERBOUND | true | statement | 86 | 86 | false | false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants. | 
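Likewise, a sketch of this row's truncated assertion completed with its ground_truth (JDBC_LOWERBOUND); the "1" argument mirrors the parallel assertions in the same prefix and is an assumption:

# hypothetical completion of the truncated line at the end of this prefix
mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")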
| 262 | 13 | 8,862 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_UPPERBOUND | true | statement | 86 | 86 | false | false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants. | 
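The chained assertions in the prefixes above lean on `unittest.mock`'s call memoization: a `MagicMock` child returns the same `return_value` object for every call, regardless of arguments, so each level of the reader chain can be asserted independently, and `assert_called_with` checks only the most recent call at that depth. A minimal, self-contained sketch of that behavior (names here are illustrative, not from the dataset):

```python
from unittest import mock

# Every call to a MagicMock attribute returns the same return_value
# object, no matter the arguments, so chained reader options can be
# asserted level by level as in the test prefixes above.
m = mock.MagicMock()
m.read.format("jdbc").option("url", "url").option("driver", "driver")

# Assert on format() before any further format() lookups: the lookups
# below each record a fresh no-argument call to format.
m.read.format.assert_called_with("jdbc")
assert m.read.format() is m.read.format.return_value

# assert_called_with checks the *last* call at that depth, which is
# why the dataset's tests use it (not assert_called_once_with) for
# the reader chain.
m.read.format().option.assert_called_with("url", "url")
m.read.format().option().option.assert_called_with("driver", "driver")
```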
| 263 | 13 | 9,015 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | non_informative | JDBC_NUMPARTITIONS | true | statement | 86 | 86 | false | false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_LOWERBOUND",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants. | 
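The `test_parse_args1` assertions in this prefix imply an argparse-style parser that accepts `--jdbctogcs.*` flags and returns a plain dict of strings keyed by the dotted option names. A rough reconstruction under that assumption (the option names come from the test; the function body is a hypothetical sketch, not the template's actual source):

```python
import argparse
from typing import Dict, List

def parse_args(args: List[str]) -> Dict[str, str]:
    """Hypothetical sketch of the parser the tests exercise: every
    --jdbctogcs.* flag is a string option, and the result is a
    vars()-style dict keyed by the dotted names."""
    parser = argparse.ArgumentParser()
    for opt in [
        "jdbctogcs.input.url",
        "jdbctogcs.input.driver",
        "jdbctogcs.input.table",
        "jdbctogcs.input.partitioncolumn",
        "jdbctogcs.input.lowerbound",
        "jdbctogcs.input.upperbound",
        "jdbctogcs.numpartitions",
        "jdbctogcs.output.location",
        "jdbctogcs.output.format",
        "jdbctogcs.output.mode",
        "jdbctogcs.output.partitioncolumn",
    ]:
        # dest may be any string, so the dotted names survive into vars()
        parser.add_argument(f"--{opt}", dest=opt, type=str, default="")
    known, _ = parser.parse_known_args(args)
    return vars(known)

# Mirrors the shape of the assertions in test_parse_args1:
parsed = parse_args(["--jdbctogcs.input.url=url", "--jdbctogcs.numpartitions=5"])
assert parsed["jdbctogcs.input.url"] == "url"
assert parsed["jdbctogcs.numpartitions"] == "5"
```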
| 264 | 13 | 9,243 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | OUTPUT_MODE_IGNORE | true | statement | 86 | 86 | false | true |
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants. | 
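Taken together, the write-side assertions across these rows (and their ground-truth tokens JDBC_NUMPARTITIONS, OUTPUT_MODE_IGNORE, CSV_HEADER) imply a `run()` that reads over JDBC and then branches on the output format: parquet via `.parquet()`, avro via `.format(...).save()`, and csv presumably with a header option before `.csv()`. A hedged sketch of that dispatch, with literal strings standing in for the template's constants; this is inferred from the assertions, not the template's actual source:

```python
# Hedged reconstruction of the write dispatch the assertions imply.
def write_output(input_data, output_format: str, output_mode: str, location: str) -> None:
    writer = input_data.write.mode(output_mode)
    if output_format == "parquet":
        writer.parquet(location)                     # test_run_pass_args2
    elif output_format == "avro":
        writer.format("avro").save(location)         # test_run_pass_args3
    elif output_format == "csv":
        writer.option("header", True).csv(location)  # test_run_pass_args4 (CSV_HEADER)
    else:
        writer.format(output_format).save(location)

# Quick self-check with the same mock style the dataset's tests use:
from unittest import mock

df = mock.MagicMock()
write_output(df, "csv", "ignore", "gs://test")
df.write.mode.assert_called_once_with("ignore")
df.write.mode().option().csv.assert_called_once_with("gs://test")
```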
| 265 | 13 | 9,364 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | CSV_HEADER | true | statement | 86 | 86 | false | true |
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants. | 
| 266 | 13 | 9,758 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | parse_args | true | function | 4 | 4 | false | false |
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template. | 
| 267 | 13 | 10,457 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | common | run | true | function | 4 | 4 | false | false |
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template. | 
| 268 | 13 | 10,567 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | FORMAT_JDBC | true | statement | 86 | 86 | false | false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants. | 
| 269 | 13 | 10,657 | googlecloudplatform__dataproc-templates | bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_URL | true | statement | 86 | 86 | false | false |
	[
  "FORMAT_JDBC",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_URL",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
prefix:
"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.
idx: 270
idx_lca: 13
offset: 10760
repo: googlecloudplatform__dataproc-templates
commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4
target_file: python/test/jdbc/test_jdbc_to_gcs.py
line_type_lca: Unknown
ground_truth: JDBC_DRIVER
in_completions: true
completion_type: statement
non_dunder_count_intellij: 86
non_dunder_count_jedi: 86
start_with_: false
first_occurrence: false
intellij_completions:
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "JDBC_DRIVER",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
]
jedi_completions:
[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
prefix:
"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.
idx: 271
idx_lca: 13
offset: 10878
repo: googlecloudplatform__dataproc-templates
commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4
target_file: python/test/jdbc/test_jdbc_to_gcs.py
line_type_lca: Unknown
ground_truth: JDBC_TABLE
in_completions: true
completion_type: statement
non_dunder_count_intellij: 86
non_dunder_count_jedi: 86
start_with_: false
first_occurrence: false
intellij_completions:
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_TABLE",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
]
jedi_completions:
[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
prefix:
"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.
idx: 272
idx_lca: 13
offset: 11004
repo: googlecloudplatform__dataproc-templates
commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4
target_file: python/test/jdbc/test_jdbc_to_gcs.py
line_type_lca: Unknown
ground_truth: JDBC_PARTITIONCOLUMN
in_completions: true
completion_type: statement
non_dunder_count_intellij: 86
non_dunder_count_jedi: 86
start_with_: false
first_occurrence: false
intellij_completions:
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants. | 
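This prefix is truncated at the completion point (constants.), and the row's ground_truth, JDBC_PARTITIONCOLUMN, is the constant that restores the final assertion. Read in sequence, the assertions pin down the reader chain that JDBCToGCSTemplate.run presumably builds. A hedged reconstruction follows, where read_jdbc is a hypothetical helper and the constants' string values are assumed from Spark's standard JDBC option names:

# Hedged sketch of the reader chain the assertions above describe.
# The option keys mirror what template_constants presumably holds; the
# real template's structure may differ.
def read_jdbc(spark, args):
    return (spark.read.format("jdbc")                               # constants.FORMAT_JDBC
            .option("url", args["jdbctogcs.input.url"])             # constants.JDBC_URL
            .option("driver", args["jdbctogcs.input.driver"])       # constants.JDBC_DRIVER
            .option("dbtable", args["jdbctogcs.input.table"])       # constants.JDBC_TABLE
            .option("partitionColumn", args["jdbctogcs.input.partitioncolumn"])
            .option("lowerBound", args["jdbctogcs.input.lowerbound"])
            .option("upperBound", args["jdbctogcs.input.upperbound"])
            .option("numPartitions", args["jdbctogcs.numpartitions"])
            .load())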
| idx: 273 | idx_lca: 13 | offset: 11149 |
| repo: googlecloudplatform__dataproc-templates |
| commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file: python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca: Unknown |
| ground_truth: JDBC_LOWERBOUND | in_completions: true |
| completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants. | 
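Across the four test_run_pass_args variants, the write-side assertions describe a per-format dispatch: parquet goes through write.mode().parquet(), avro through write.mode().format().save(), csv through write.mode().option(CSV_HEADER, True).csv(), and the json variant, whose prefix is truncated before its write assertions, presumably ends in write.mode().json(). A hedged sketch of that dispatch, with write_output as a hypothetical name:

# Hedged sketch of the writer dispatch the tests assert. Only the
# branches visible in the assertions are reconstructed; the json branch
# is inferred from the test name alone.
def write_output(df, fmt, mode, path):
    writer = df.write.mode(mode)                 # constants.OUTPUT_MODE_*
    if fmt == "parquet":
        writer.parquet(path)
    elif fmt == "avro":
        writer.format("avro").save(path)         # constants.FORMAT_AVRO (value assumed)
    elif fmt == "csv":
        writer.option("header", True).csv(path)  # constants.CSV_HEADER (value assumed)
    elif fmt == "json":
        writer.json(path)                        # inferred, not asserted in this prefix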
| idx: 274 | idx_lca: 13 | offset: 11293 |
| repo: googlecloudplatform__dataproc-templates |
| commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file: python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca: Unknown |
| ground_truth: JDBC_UPPERBOUND | in_completions: true |
| completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants. | 
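Each row's in_completions flag appears to record whether the ground_truth token occurs in the completion lists: for every row shown here it is true, and the token does appear in both the intellij_completions and jedi_completions arrays. A quick self-contained check of that reading, with the lists abbreviated to a few entries:

# Hedged check of how in_completions seems to be derived (lists abbreviated).
intellij = ["FORMAT_JDBC", "JDBC_URL", "JDBC_TABLE", "JDBC_DRIVER",
            "JDBC_NUMPARTITIONS", "JDBC_UPPERBOUND"]
jedi = [{"name": "JDBC_UPPERBOUND", "type": "statement"}]
ground_truth = "JDBC_UPPERBOUND"

in_completions = (ground_truth in intellij
                  and any(c["name"] == ground_truth for c in jedi))
assert in_completions  # matches the recorded value for idx=274: true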
| 275 | 13 | 11,446 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_NUMPARTITIONS | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_LOWERBOUND",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants. | 
| 276 | 13 | 11,674 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	OUTPUT_MODE_IGNORE | true | 
	statement | 86 | 86 | false | false | 
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants. | 
| 277 | 13 | 12,062 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	inproject | 
	parse_args | true | 
	function | 4 | 4 | false | false | 
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template. | 
| 278 | 13 | 12,538 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	run | true | 
	function | 4 | 4 | false | false | 
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.
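The chained read.format().option()... assertions filling these prefixes rest on a standard unittest.mock behavior: calling a MagicMock child returns the same return_value object no matter the arguments, so the producer's call chain can be replayed link by link. A self-contained sketch of that mechanic, with illustrative names that are not taken from the repo:

from unittest import mock

spark = mock.MagicMock()

# Producer side: one chained reader call, shaped like the template's run().
spark.read.format("jdbc").option("url", "url").option("driver", "driver")

# Consumer side: calling format() again, with or without arguments, hands
# back the same child mock, so each link of the chain can be checked
# against its most recent call.
assert spark.read.format() is spark.read.format("jdbc")
spark.read.format().option.assert_called_with("url", "url")
spark.read.format().option().option.assert_called_with("driver", "driver")

Note that assertion order matters: replaying a deeper link re-calls the shallower mocks with no arguments, which is why the tests in these prefixes assert the shallow links before the deep ones.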
idx: 279 | idx_lca: 13 | offset: 12648
repo: googlecloudplatform__dataproc-templates
commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4
target_file: python/test/jdbc/test_jdbc_to_gcs.py
line_type_lca: Unknown
ground_truth: FORMAT_JDBC | in_completions: true
completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false
intellij_completions:
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
]
jedi_completions:
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
prefix:
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.
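The completion targets in this record and the next two are module-level string constants. Their real values live in dataproc_templates/util/template_constants.py at the listed commit; purely as a reading aid, a hypothetical stand-in built from Spark's documented JDBC option keys and save modes (every binding below is an assumption, not taken from the repo):

# Hypothetical stand-in for the template_constants names used in these
# prefixes. The string values are Spark's documented JDBC option keys and
# DataFrameWriter save modes; whether the repo binds exactly these values
# at this commit is an assumption.
FORMAT_JDBC = "jdbc"
FORMAT_AVRO = "avro"
CSV_HEADER = "header"
JDBC_URL = "url"
JDBC_DRIVER = "driver"
JDBC_TABLE = "dbtable"
JDBC_PARTITIONCOLUMN = "partitionColumn"
JDBC_LOWERBOUND = "lowerBound"
JDBC_UPPERBOUND = "upperBound"
JDBC_NUMPARTITIONS = "numPartitions"
OUTPUT_MODE_APPEND = "append"
OUTPUT_MODE_OVERWRITE = "overwrite"
OUTPUT_MODE_IGNORE = "ignore"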
idx: 280 | idx_lca: 13 | offset: 12738
repo: googlecloudplatform__dataproc-templates
commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4
target_file: python/test/jdbc/test_jdbc_to_gcs.py
line_type_lca: Unknown
ground_truth: JDBC_URL | in_completions: true
completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false
intellij_completions:
	[
  "FORMAT_JDBC",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_URL",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
]
jedi_completions:
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
prefix:
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.
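A natural consumer of these records scores an engine's ranked list against ground_truth. A hypothetical helper, not part of the dataset's own tooling, illustrated against the intellij list of idx 280 above:

def rank_of(ground_truth: str, completions: list[str]) -> int | None:
    """1-based rank of the ground-truth token in a completion list, or None."""
    if ground_truth in completions:
        return completions.index(ground_truth) + 1
    return None

# idx 280 above: "JDBC_URL" is the fourth entry of its intellij list.
assert rank_of("JDBC_URL", ["FORMAT_JDBC", "JDBC_TABLE", "JDBC_DRIVER", "JDBC_URL"]) == 4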
idx: 281 | idx_lca: 13 | offset: 12841
repo: googlecloudplatform__dataproc-templates
commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4
target_file: python/test/jdbc/test_jdbc_to_gcs.py
line_type_lca: Unknown
ground_truth: JDBC_DRIVER | in_completions: true
completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false
intellij_completions:
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "JDBC_DRIVER",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants. | 
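Row idx 281 above stops its prefix at exactly this kind of cursor, and its ground_truth value (JDBC_DRIVER) is the token that completes the interrupted assert_called_with line. Because the prefix ends at the cursor, recovering the completed source line is plain concatenation; a minimal sketch follows, with the helper name complete_prefix being illustrative only.

# Minimal sketch: splice the ground-truth token back onto the truncated prefix.
# Assumes the prefix ends exactly at the completion cursor, as these rows do.
def complete_prefix(prefix: str, ground_truth: str) -> str:
    """Append the ground-truth identifier at the cursor position."""
    return prefix + ground_truth

prefix = ("mock_spark_session.read.format().option().option"
          ".assert_called_with(constants.")
print(complete_prefix(prefix, "JDBC_DRIVER"))
# -> mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER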
| 282 | 13 | 12,959 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_TABLE | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_DRIVER",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants. | 
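In the two rows above, the ground truth sits at a different depth in the intellij_completions list: JDBC_DRIVER is fifth in idx 281 and JDBC_TABLE is fourth in idx 282, which suggests the list is relevance-ordered. That ordering is an inference from this dump, not a documented guarantee; under that assumption, mean reciprocal rank is a natural summary metric, sketched below.

# Minimal sketch: mean reciprocal rank of the ground truth in an ordered
# completion list. The relevance ordering of intellij_completions is an
# assumption drawn from the rows above, not a documented property.
def reciprocal_rank(completions: list, ground_truth: str) -> float:
    """1/rank of the ground truth (1-indexed), or 0.0 if it is absent."""
    try:
        return 1.0 / (completions.index(ground_truth) + 1)
    except ValueError:
        return 0.0

rows = [  # (ordered completions, ground truth) pairs mirroring idx 281 and 282
    (["FORMAT_JDBC", "JDBC_URL", "JDBC_TABLE", "JDBC_NUMPARTITIONS", "JDBC_DRIVER"], "JDBC_DRIVER"),
    (["FORMAT_JDBC", "JDBC_URL", "JDBC_DRIVER", "JDBC_TABLE"], "JDBC_TABLE"),
]
mrr = sum(reciprocal_rank(c, gt) for c, gt in rows) / len(rows)
print(f"MRR: {mrr:.3f}")  # (1/5 + 1/4) / 2 = 0.225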
| 283 | 13 | 13,085 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_NUMPARTITIONS | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_LOWERBOUND",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants. | 
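A note on the mock pattern that dominates these prefixes: with unittest.mock, calling a child mock such as read.format() returns the same return_value object on every call, which is why the tests can re-walk the format().option()...option() chain after run() and land on exactly the mocks the template touched. A minimal, self-contained sketch of that mechanism (plain MagicMock, no pyspark required):

from unittest.mock import MagicMock

spark = MagicMock()

# What the code under test would do:
spark.read.format("jdbc").option("url", "jdbc:mysql://host/db").load()

# Re-walking the chain reaches the same child mocks, because a MagicMock
# hands back one fixed return_value for every call to the same attribute:
spark.read.format.assert_called_with("jdbc")
spark.read.format().option.assert_called_with("url", "jdbc:mysql://host/db")
spark.read.format().option().load.assert_called_once()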
| 284 | 13 | 13,287 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	OUTPUT_MODE_APPEND | true | 
	statement | 86 | 86 | false | false | 
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants. | 
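Read side by side, the assertion chains in these prefixes imply the shape of JDBCToGCSTemplate.run without ever quoting it: a JDBC read configured with url, driver and table (plus the optional partitioncolumn/lowerbound/upperbound trio, with numpartitions defaulting to "10" per test_run_pass_args6), followed by a write dispatched on the output format. The following is a rough reconstruction inferred from those assertions alone; the literal option keys and the json branch are assumptions, not the template's actual source:

from unittest.mock import MagicMock

def run_sketch(spark, args):
    # Hypothetical read path; "dbtable" / "numPartitions" are the usual
    # Spark JDBC option keys, standing in for the constants asserted above.
    input_data = (spark.read.format("jdbc")
                  .option("url", args["jdbctogcs.input.url"])
                  .option("driver", args["jdbctogcs.input.driver"])
                  .option("dbtable", args["jdbctogcs.input.table"])
                  .option("numPartitions", args.get("jdbctogcs.numpartitions", "10"))
                  .load())
    writer = input_data.write.mode(args["jdbctogcs.output.mode"])
    out = args["jdbctogcs.output.location"]
    fmt = args["jdbctogcs.output.format"]
    if fmt == "parquet":
        writer.parquet(out)
    elif fmt == "avro":
        writer.format("avro").save(out)
    elif fmt == "csv":
        writer.option("header", True).csv(out)
    elif fmt == "json":
        # Only hinted at by the commented-out assertion in test_run_pass_args5.
        writer.json(out)

# Exercised with a MagicMock standing in for a SparkSession:
run_sketch(MagicMock(), {
    "jdbctogcs.input.url": "url",
    "jdbctogcs.input.driver": "driver",
    "jdbctogcs.input.table": "table1",
    "jdbctogcs.output.location": "gs://test",
    "jdbctogcs.output.format": "csv",
    "jdbctogcs.output.mode": "append",
})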
| 285 | 13 | 13,408 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	CSV_HEADER | true | 
	statement | 86 | 86 | false | false | 
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants. | 
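One more pattern worth spelling out: every test here decorates itself with @mock.patch.object(pyspark.sql, 'SparkSession') and then hands the injected mock directly to run(), so no real SparkSession is ever constructed. A self-contained miniature of that mechanism, using a stand-in namespace instead of pyspark so it runs anywhere:

import types
from unittest import mock

# Stand-in for the pyspark.sql module, so the example runs without pyspark.
fake_sql = types.SimpleNamespace(SparkSession=object)

@mock.patch.object(fake_sql, 'SparkSession')
def demo(mock_spark_session):
    # The decorator swaps the attribute for a MagicMock and passes it in;
    # the code under test only ever sees this mock.
    mock_spark_session.read.format("jdbc").load()
    mock_spark_session.read.format.assert_called_once_with("jdbc")

demo()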
| 286 | 13 | 13,801 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	parse_args | true | 
	function | 4 | 4 | false | false | 
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.assert_called_once()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
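
        # For comparison, the real (non-mock) writer call verified above would be
        # roughly the following, assuming constants.CSV_HEADER equals "header":
        #   df.write.mode("ignore").option("header", True).csv("gs://test")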
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        # mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
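        # A sketch of the equivalent real call, mirroring the csv and parquet paths
        # (the direct assertion above is left commented out in this suite):
        #   df.write.mode("ignore").json("gs://test")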
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
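
        # With the partition column, bounds, and numpartitions flags omitted, the
        # read side is configured with a default of "10" partitions, as the
        # JDBC_NUMPARTITIONS assertion above shows.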
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args7(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template. | 
| 287 | 13 | 14,335 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	run | true | 
	function | 4 | 4 | false | false | 
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        # mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args7(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template. | 
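(Ground truth for this row is `run`; following the identical tests earlier in the prefix, the truncated statement would continue as `jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)`.)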
| 288 | 13 | 14,445 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	FORMAT_JDBC | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        # mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args7(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants. | 
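(Ground truth for this row is `FORMAT_JDBC`, matching the `read.format.assert_called_with(constants.FORMAT_JDBC)` assertion that opens every run test above.)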
| 289 | 13 | 14,535 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_URL | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_URL",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
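
The Jedi-style list above closes with four __dunder__ entries typed "instance"; the paired counts in each row's scalar fields (the "86 | 86" visible in the rows below) plausibly count only the non-dunder candidates. A minimal sketch of that filter, illustrative only (non_dunder_count is an assumed name, not part of the dataset tooling):

def non_dunder_count(completions):
    """Count entries whose name is not a __dunder__.

    completions is shaped like the jedi_completions field above:
    a list of {"name": ..., "type": ...} dicts.
    """
    return sum(
        1 for c in completions
        if not (c["name"].startswith("__") and c["name"].endswith("__"))
    )

sample = [
    {"name": "JDBC_DRIVER", "type": "statement"},
    {"name": "__doc__", "type": "instance"},
]
assert non_dunder_count(sample) == 1
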
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args7(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants. | 
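
The prefix field above stops deliberately at "constants.", the completion cursor for its row; everything after that point is what a completion engine must supply. The assertion style used throughout these test prefixes leans on a unittest.mock mechanic: MagicMock returns one shared child mock per attribute, so replaying the call chain with empty parentheses navigates back to the mock whose most recent call can be checked. A self-contained sketch of that mechanic (illustrative, not the repository's code):

from unittest import mock

spark = mock.MagicMock()

# What the template under test would do: a chained reader setup.
spark.read.format("jdbc").option("url", "u").option("driver", "d").load()

# format() with no arguments returns the same child mock that
# format("jdbc") did, so the chain can be replayed argument-free to
# reach each mock and assert on its latest call, exactly as the test
# prefixes in this dump do.
spark.read.format.assert_called_with("jdbc")
spark.read.format().option.assert_called_with("url", "u")
spark.read.format().option().option.assert_called_with("driver", "d")
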
| 290 | 13 | 14,638 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_DRIVER | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "JDBC_DRIVER",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args7(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants. | 
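
Row 290 above records JDBC_DRIVER as its ground truth, and its prefix is cut exactly where that constant belongs: at the argument of the second replayed .option assertion. One way rows like this might be scored is by locating the ground truth in each engine's candidate list; a hypothetical helper (rank_of is an assumed name, not dataset tooling):

def rank_of(candidates, target):
    """1-based rank of target among candidates, else None.

    Accepts flat strings (IntelliJ-style list above) or
    {"name": ..., "type": ...} dicts (Jedi-style list above).
    """
    for i, cand in enumerate(candidates, start=1):
        name = cand["name"] if isinstance(cand, dict) else cand
        if name == target:
            return i
    return None

intellij_head = ["FORMAT_JDBC", "JDBC_URL", "JDBC_TABLE",
                 "JDBC_NUMPARTITIONS", "JDBC_DRIVER"]  # head of row 290's list
assert rank_of(intellij_head, "JDBC_DRIVER") == 5
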
| 291 | 13 | 14,756 | 
	googlecloudplatform__dataproc-templates | 
	bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_TABLE | true | 
	statement | 86 | 86 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_DRIVER",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
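
Row 291's ground truth is JDBC_TABLE. In the full-argument tests shown in these prefixes, the read-side options are always asserted in one fixed order, so the number of completed .option() calls replayed before the cursor determines which constant is expected next. A hedged sketch of that mapping, inferred from the assertion order above rather than taken from repository code:

# Order in which the full-argument tests above bind read options.
READ_OPTION_ORDER = [
    "JDBC_URL", "JDBC_DRIVER", "JDBC_TABLE", "JDBC_PARTITIONCOLUMN",
    "JDBC_LOWERBOUND", "JDBC_UPPERBOUND", "JDBC_NUMPARTITIONS",
]

def expected_constant(option_depth):
    # option_depth: completed .option() calls replayed before the
    # ".option.assert_called_with(constants." cursor.
    return READ_OPTION_ORDER[option_depth]

assert expected_constant(1) == "JDBC_DRIVER"  # row 290's ground truth
assert expected_constant(2) == "JDBC_TABLE"   # row 291's ground truth
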
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
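    # The json test above stops at the mode assertion; its final check is
    # commented out in the source. If enabled, it would mirror the csv and
    # parquet variants, e.g. (minimal sketch, mocks only):
    #
    #     from unittest.mock import MagicMock
    #
    #     df = MagicMock(name="df")
    #     df.write.mode("ignore").json("gs://test")
    #     df.write.mode.assert_called_once_with("ignore")
    #     df.write.mode().json.assert_called_once_with("gs://test")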
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
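    # test_run_pass_args6 asserts numPartitions "10" although no
    # --jdbctogcs.numpartitions flag was passed, implying the template falls
    # back to a default of "10". A hypothetical helper mirroring that implied
    # behavior (names assumed; not the template's actual code):
    #
    #     def jdbc_reader_options(args: dict) -> dict:
    #         return {
    #             "url": args["jdbctogcs.input.url"],
    #             "driver": args["jdbctogcs.input.driver"],
    #             "dbtable": args["jdbctogcs.input.table"],
    #             "numPartitions": args.get("jdbctogcs.numpartitions", "10"),
    #         }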
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args7(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants. | 
| idx: 292 | idx_lca: 13 | offset: 14,882 |
| repo: googlecloudplatform__dataproc-templates |
| commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file: python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca: Unknown |
| ground_truth: JDBC_NUMPARTITIONS | in_completions: true |
| completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_LOWERBOUND",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
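A sketch of how this row's derived fields could be recomputed from the
completion lists above (the field names come from the dataset schema; the
exact logic is an assumption):

    def non_dunder_count(names):
        # Counts entries not starting with "__"; 86 for both engines here.
        return sum(1 for n in names if not n.startswith("__"))

    def in_completions(names, ground_truth):
        # True for this row: "JDBC_NUMPARTITIONS" appears in both lists.
        return ground_truth in names

Given plain name strings (as in intellij_completions); the jedi entries would
first need their "name" values extracted.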
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args7(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants. | 
| idx: 293 | idx_lca: 13 | offset: 15,084 |
| repo: googlecloudplatform__dataproc-templates |
| commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file: python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca: Unknown |
| ground_truth: OUTPUT_MODE_APPEND | in_completions: true |
| completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false |
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args7(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants. | 
| idx: 294 | idx_lca: 13 | offset: 15,325 |
| repo: googlecloudplatform__dataproc-templates |
| commit_hash: bba5da698a8aa144c73d4d2a90e84c6a577ce7f4 |
| target_file: python/test/jdbc/test_jdbc_to_gcs.py |
| line_type_lca: Unknown |
| ground_truth: CSV_HEADER | in_completions: true |
| completion_type: statement | non_dunder_count_intellij: 86 | non_dunder_count_jedi: 86 | start_with_: false | first_occurrence: false |
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "CSV_HEADER",
  "CSV_INFER_SCHEMA",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "CSV_HEADER",
    "type": "statement"
  },
  {
    "name": "CSV_INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args7(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "10")
        mock_spark_session.read.format().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().partitionBy.assert_called_once_with("column")
        mock_spark_session.dataframe.DataFrame.write.mode().partitionBy().option.assert_called_once_with(constants. | 
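
A note on the mocking idiom that runs through the prefix fields above: these tests never build a real SparkSession. They lean on unittest.mock.MagicMock memoizing each call's return_value, so read.format() called with any arguments (or none) resolves to the same child mock, and an empty re-walk of the chain addresses exactly the nested mock the code under test touched. A minimal sketch of the pattern, outside the dataset (the names here are illustrative, not taken from any row):

from unittest.mock import MagicMock

spark = MagicMock()

# Stand-in for what the template's run() does with the session:
spark.read.format("jdbc").option("url", "url").option("driver", "driver").load()

# A MagicMock memoizes its return_value, so format() with no arguments resolves
# to the same child mock that format("jdbc") returned. Re-walking the chain with
# empty calls therefore reaches each nested mock. Note the empty re-walk calls
# are themselves recorded on the parent mocks, which is why the tests above
# assert outermost-first:
spark.read.format.assert_called_with("jdbc")
spark.read.format().option.assert_called_with("url", "url")
spark.read.format().option().option.assert_called_with("driver", "driver")
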
| 295 | 14 | 1,013 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	inproject | 
	parse_args | true | 
	function | 4 | 4 | false | false | 
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template. | 
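
The row above cuts its prefix at the attribute access (jdbc_to_gcs_template.) and records parse_args as the ground truth, with the in_completions flag true because both engines list that name. A hypothetical helper (the function name and the abridged list are assumptions, not fields of the dump) showing how such a flag could be derived:

def in_completions(ground_truth, candidates):
    # True when any completion candidate's name matches the held-out token.
    return any(c["name"] == ground_truth for c in candidates)

jedi_candidates = [
    {"name": "build", "type": "function"},
    {"name": "get_logger", "type": "function"},
    {"name": "parse_args", "type": "function"},
    {"name": "run", "type": "function"},
]
print(in_completions("parse_args", jedi_candidates))  # True, matching the row
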
| 296 | 14 | 2,586 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	parse_args | true | 
	function | 4 | 4 | false | false | 
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template. | 
| 297 | 14 | 3,291 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	run | true | 
	function | 4 | 4 | false | false | 
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template. | 
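
The 4 | 4 pair in rows like the one above corresponds to the non_dunder_count_intellij / non_dunder_count_jedi columns: the number of candidates from each engine once dunder names are excluded. A sketch of that count (the helper name is assumed; the list is the row's IntelliJ candidates, abridged):

def non_dunder_count(names):
    # Dunder entries (__init__, __repr__, ...) are excluded from the count.
    return sum(1 for n in names if not n.startswith("__"))

intellij = ["run", "parse_args", "build", "get_logger",
            "__annotations__", "__class__", "__delattr__", "__dict__"]  # abridged
print(non_dunder_count(intellij))  # 4
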
| 298 | 14 | 3,401 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	FORMAT_JDBC | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants. | 
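
Worth noting about the row above (ground truth FORMAT_JDBC, 103 non-dunder candidates): the two engines appear to order their candidates differently. IntelliJ puts the target first, consistent with relevance ranking, while jedi returns names alphabetically. A small check against the visible heads of those lists (abridged here):

intellij_head = ["FORMAT_JDBC", "JDBC_URL", "JDBC_TABLE", "JDBC_DRIVER", "FORMAT_AVRO"]
jedi_head = ["BQ_GCS_INPUT_TABLE", "BQ_GCS_OUTPUT_FORMAT", "BQ_GCS_OUTPUT_LOCATION",
             "BQ_GCS_OUTPUT_MODE", "COMPRESSION_BZIP2"]

print(intellij_head.index("FORMAT_JDBC"))  # 0: ranked first by IntelliJ
print(jedi_head == sorted(jedi_head))      # True: jedi's ordering is alphabetical
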
| 299 | 14 | 3,491 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_URL | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_URL",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants. | 
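Note on these rows: each prefix is cut exactly at a completion point, here the trailing "constants.", and the row's ground_truth column holds the expected token (the fuller prefixes in later rows show this particular line completed as constants.JDBC_URL). For orientation, a minimal sketch of the JDBC read path these chained assertions pin down; the option order is inferred from the assert_called_with chain, and read_jdbc_input is a hypothetical name, not JDBCToGCSTemplate's actual method:

# Hypothetical reconstruction of the read path the assertions exercise.
# Option order mirrors the chained assert_called_with calls; the function
# name and dict-style args are assumptions, not the template's real API.
from pyspark.sql import DataFrame, SparkSession
import dataproc_templates.util.template_constants as constants

def read_jdbc_input(spark: SparkSession, args: dict) -> DataFrame:
    return (
        spark.read.format(constants.FORMAT_JDBC)
        .option(constants.JDBC_URL, args["jdbctogcs.input.url"])
        .option(constants.JDBC_DRIVER, args["jdbctogcs.input.driver"])
        .option(constants.JDBC_TABLE, args["jdbctogcs.input.table"])
        .option(constants.JDBC_PARTITIONCOLUMN, args["jdbctogcs.input.partitioncolumn"])
        .option(constants.JDBC_LOWERBOUND, args["jdbctogcs.input.lowerbound"])
        .option(constants.JDBC_UPPERBOUND, args["jdbctogcs.input.upperbound"])
        .option(constants.JDBC_NUMPARTITIONS, args["jdbctogcs.numpartitions"])  # seventh option: assumed
        .load()
    )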
| idx: 300 | idx_lca: 14 | offset: 3,594 |
	repo: googlecloudplatform__dataproc-templates |
	commit_hash: d62560011b069690d01cf2db563788bf81029623 |
	target_file: python/test/jdbc/test_jdbc_to_gcs.py |
	line_type_lca: Unknown |
	ground_truth: JDBC_DRIVER | in_completions: true |
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_AVRO",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_DRIVER",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants. | 
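The mock plumbing in this test leans on two MagicMock behaviors worth spelling out: a mock returns the same child object for every call, whatever the arguments, and assert_called_with inspects only the most recent call, which is why the no-argument format().option() calls on the setup line do not break the later assertions. A minimal, self-contained demonstration (not repo code):

# Standalone illustration of the MagicMock behavior the test relies on.
from unittest import mock

spark = mock.MagicMock()

# Every call to .format returns the same child mock, regardless of args:
assert spark.read.format("jdbc") is spark.read.format()

# Each chaining depth is a distinct mock, so one assertion per depth
# checks the arguments passed at exactly that depth:
spark.read.format("jdbc").option("url", "u").option("driver", "d")
spark.read.format().option.assert_called_with("url", "u")
spark.read.format().option().option.assert_called_with("driver", "d")

# assert_called_with checks only the LAST call, so earlier no-argument
# calls made while wiring load.return_value are harmlessly superseded.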
| idx: 301 | idx_lca: 14 | offset: 3,712 |
	repo: googlecloudplatform__dataproc-templates |
	commit_hash: d62560011b069690d01cf2db563788bf81029623 |
	target_file: python/test/jdbc/test_jdbc_to_gcs.py |
	line_type_lca: Unknown |
	ground_truth: JDBC_TABLE | in_completions: true |
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_DRIVER",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants. | 
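test_parse_args1 checks that every parsed value, including the numeric bounds, comes back as a string keyed by the dotted option name. A sketch of an argparse setup that behaves that way; the real parse_args lives on the template's base class and may be wired differently:

# Sketch only: argparse keeps values as strings unless type= is given,
# and a dotted dest yields the dotted dictionary keys the test asserts on.
import argparse
from typing import Dict, Sequence

def parse_args(args: Sequence[str]) -> Dict[str, str]:
    parser = argparse.ArgumentParser()
    for opt in ("jdbctogcs.input.url",
                "jdbctogcs.input.driver",
                "jdbctogcs.input.lowerbound",
                "jdbctogcs.numpartitions"):  # remaining options elided
        parser.add_argument(f"--{opt}", dest=opt)
    known, _ = parser.parse_known_args(args)
    return vars(known)

assert parse_args(["--jdbctogcs.input.lowerbound=1"])["jdbctogcs.input.lowerbound"] == "1"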
| idx: 302 | idx_lca: 14 | offset: 3,838 |
	repo: googlecloudplatform__dataproc-templates |
	commit_hash: d62560011b069690d01cf2db563788bf81029623 |
	target_file: python/test/jdbc/test_jdbc_to_gcs.py |
	line_type_lca: Unknown |
	ground_truth: JDBC_PARTITIONCOLUMN | in_completions: true |
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants. | 
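Beyond the code prefix, each row tracks how the ground_truth token fares against the candidate lists: in_completions, the non-dunder counts, start_with_, first_occurrence. A sketch of how such per-row fields could be derived; the column semantics are inferred from their names, not taken from this dataset's actual generation pipeline:

# Illustrative derivation of per-row completion metrics; treat as a sketch.
from typing import List, Optional

def row_stats(ground_truth: str, candidates: List[str]) -> dict:
    rank: Optional[int] = (
        candidates.index(ground_truth) if ground_truth in candidates else None
    )
    return {
        "in_completions": ground_truth in candidates,
        "rank": rank,  # 0-based position in the raw candidate list
        # dunder members (__doc__, __name__, ...) are excluded from the count
        "non_dunder_count": sum(1 for c in candidates if not c.startswith("__")),
        "start_with_": ground_truth.startswith("_"),
    }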
| idx: 303 | idx_lca: 14 | offset: 3,983 |
	repo: googlecloudplatform__dataproc-templates |
	commit_hash: d62560011b069690d01cf2db563788bf81029623 |
	target_file: python/test/jdbc/test_jdbc_to_gcs.py |
	line_type_lca: random |
	ground_truth: JDBC_LOWERBOUND | in_completions: true |
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants. | 
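Note on the row above: its prefix truncates at constants. inside the fifth level of the chained option assertion, and the rows that follow (offsets 4,127 onward) complete that same line as constants.JDBC_LOWERBOUND, "1", so that is presumably the elided constant here. What makes the bare option() navigation in these assertions work is a property of MagicMock: calling a mock attribute returns the same .return_value object no matter the arguments. A minimal sketch using the stdlib unittest.mock (which the standalone mock package imported in the prefix mirrors); spark here is an illustrative stand-in, not part of the dataset:

    from unittest.mock import MagicMock

    spark = MagicMock()

    # Exercise the mock the way the template's reader chain would.
    spark.read.format("jdbc").option("url", "u").load()

    # Calling a mock attribute returns the same .return_value whatever the
    # arguments, so bare format()/option() calls re-navigate to the node the
    # real format("jdbc")/option("url", "u") calls went through.
    spark.read.format.assert_called_with("jdbc")
    spark.read.format().option.assert_called_with("url", "u")
    assert spark.read.format() is spark.read.format("jdbc")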
| 304 | 14 | 4,127 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_UPPERBOUND | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants. | 
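This row truncates one assertion deeper, and its ground-truth column resolves the elided name to JDBC_UPPERBOUND, matching the completed line visible in the later prefixes: option.assert_called_with(constants.JDBC_UPPERBOUND, "2"). One caveat these chained assertions rely on: assert_called_with checks only the most recent call on that mock, which is why the test asserts each chain level in read order before deeper assertion lines re-invoke the shallower levels with no arguments. A small illustration, with opt as a hypothetical stand-in for one level of the option chain:

    from unittest.mock import MagicMock

    opt = MagicMock()                 # stand-in for one level of the option chain
    opt("lowerbound", "1")
    opt("upperbound", "2")

    opt.assert_called_with("upperbound", "2")   # checks only the most recent call
    opt.assert_any_call("lowerbound", "1")      # matches any earlier recorded call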
| 305 | 14 | 4,280 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_NUMPARTITIONS | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants. | 
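The elided constant in this row is JDBC_NUMPARTITIONS per its ground-truth column, which completes the seven-option read chain the assertions walk through. Reconstructed from exactly those asserted (key, value) pairs, the reader the template evidently builds looks like the sketch below; this is inferred from the test assertions, not taken from the template's source:

    from pyspark.sql import SparkSession, DataFrame
    import dataproc_templates.util.template_constants as constants

    def read_jdbc(spark: SparkSession, args: dict) -> DataFrame:
        # Option order mirrors the assertion order in test_run_pass_args2.
        return spark.read \
            .format(constants.FORMAT_JDBC) \
            .option(constants.JDBC_URL, args["jdbctogcs.input.url"]) \
            .option(constants.JDBC_DRIVER, args["jdbctogcs.input.driver"]) \
            .option(constants.JDBC_TABLE, args["jdbctogcs.input.table"]) \
            .option(constants.JDBC_PARTITIONCOLUMN, args["jdbctogcs.input.partitioncolumn"]) \
            .option(constants.JDBC_LOWERBOUND, args["jdbctogcs.input.lowerbound"]) \
            .option(constants.JDBC_UPPERBOUND, args["jdbctogcs.input.upperbound"]) \
            .option(constants.JDBC_NUMPARTITIONS, args["jdbctogcs.numpartitions"]) \
            .load()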
| 306 | 14 | 4,508 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	OUTPUT_MODE_OVERWRITE | true | 
	statement | 103 | 103 | false | false | 
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants. | 
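Here the truncation moves past the read assertions to the write side: per this row's ground-truth column the elided name is OUTPUT_MODE_OVERWRITE, and the completed prefixes that follow add write.mode().parquet.assert_called_once_with("gs://test"). Note also the bare .load() call on the line just above the truncation point: it invokes the mock rather than asserting on it (an assertion-style equivalent would be load.assert_called_once_with()), which is how the line appears in the source file. The write path those two assertions pin down, sketched under the test's naming rather than copied from the template:

    from pyspark.sql import DataFrame
    import dataproc_templates.util.template_constants as constants

    def write_gcs(input_data: DataFrame, output_location: str) -> None:
        # Mode then parquet, matching the two write-side assertions;
        # overwrite mode and "gs://test" come from this test's CLI args.
        input_data.write \
            .mode(constants.OUTPUT_MODE_OVERWRITE) \
            .parquet(output_location)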
| 307 | 14 | 4,902 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	parse_args | true | 
	function | 4 | 4 | false | false | 
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template. | 
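This row cuts at jdbc_to_gcs_template. and its ground-truth column gives parse_args, the call that opens every test method here. Judging from test_parse_args1, parse_args turns dotted --key=value arguments into a plain dict of strings keyed by the dotted names. An illustrative argparse sketch that would satisfy those assertions; the real logic lives in JDBCToGCSTemplate.parse_args and may well differ:

    import argparse
    from typing import Dict, Sequence

    def parse_args(args: Sequence[str]) -> Dict[str, str]:
        parser = argparse.ArgumentParser()
        # One string-valued option per dotted key asserted in test_parse_args1.
        for key in ("jdbctogcs.input.url", "jdbctogcs.input.driver",
                    "jdbctogcs.input.table", "jdbctogcs.input.partitioncolumn",
                    "jdbctogcs.input.lowerbound", "jdbctogcs.input.upperbound",
                    "jdbctogcs.numpartitions", "jdbctogcs.output.location",
                    "jdbctogcs.output.format", "jdbctogcs.output.mode",
                    "jdbctogcs.output.partitioncolumn"):
            parser.add_argument(f"--{key}", dest=key)
        known, _ = parser.parse_known_args(list(args))
        return vars(known)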
| 308 | 14 | 5,601 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	run | true | 
	function | 4 | 4 | false | false | 
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template. | 
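The long `format().option().option()…` assertion chains in the prefix above rely on one MagicMock property worth making explicit: every call on a mock attribute, whatever its arguments, returns the same memoized `return_value` child. Replaying the chain inside the test therefore reaches exactly the mock objects the template touched. A minimal, self-contained sketch of that mechanism (illustrative names, not the repository's code):

    from unittest import mock

    spark = mock.MagicMock()

    # Code under test performs a chained reader call:
    spark.read.format("jdbc").option("url", "url").load()

    # `format` was last called with "jdbc"; assert before replaying the
    # chain, because replay calls like `spark.read.format()` get recorded too.
    spark.read.format.assert_called_with("jdbc")

    # Replaying with no arguments still returns the memoized child mock,
    # so `.option` here is the very mock the code under test invoked.
    spark.read.format().option.assert_called_with("url", "url")
    assert spark.read.format() is spark.read.format("anything-else")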
| 309 | 14 | 5,711 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	FORMAT_JDBC | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants. | 
| 310 | 14 | 5,801 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_URL | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_URL",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants. | 
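Each row pairs a truncated `prefix` with the `ground_truth` token expected at the cursor and the candidate lists returned by each engine; the `in_completions` flag records whether that token appears among the candidates. A hedged sketch of that relationship, assuming the field names shown in the table header (the row literal below is an abbreviated, hypothetical example, not a full row):

    def ground_truth_in_candidates(row: dict) -> bool:
        """True when the expected token appears among the Jedi candidates."""
        names = {c["name"] for c in row["jedi_completions"]}
        return row["ground_truth"] in names

    row = {
        "ground_truth": "JDBC_URL",
        "jedi_completions": [
            {"name": "JDBC_URL", "type": "statement"},
            {"name": "JDBC_TABLE", "type": "statement"},
        ],
    }
    assert ground_truth_in_candidates(row)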
| 311 | 14 | 5,904 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_DRIVER | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_DRIVER",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants. | 
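The `parse_args` assertions repeated in these prefixes pin down the template's flag convention: every `--key=value` argument surfaces as a string entry under its dotted key, numeric values included ("5", not 5). A rough sketch of an equivalent mapping, not the template's actual parser:

    def parse_dotted_flags(argv):
        """Map --key=value flags to a {key: value} dict of strings."""
        parsed = {}
        for arg in argv:
            key, _, value = arg.removeprefix("--").partition("=")
            parsed[key] = value
        return parsed

    args = parse_dotted_flags([
        "--jdbctogcs.input.url=url",
        "--jdbctogcs.numpartitions=5",
    ])
    assert args["jdbctogcs.input.url"] == "url"
    assert args["jdbctogcs.numpartitions"] == "5"  # values stay strings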
| 312 | 14 | 6,022 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_TABLE | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_TABLE",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants. | 
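The test bodies above lean on a unittest.mock property that is easy to miss: calling a MagicMock returns the same return_value child no matter the arguments, so a fluent Spark chain can be asserted link by link. A minimal, self-contained sketch of that idiom (the JDBC values are illustrative, not taken from the dataset):

from unittest import mock

# Every call on a MagicMock yields the same child: spark.read.format("jdbc")
# and spark.read.format() both return spark.read.format.return_value, which
# is why read.format().option().option()... chains can be walked in asserts.
spark = mock.MagicMock()

spark.read.format("jdbc").option("url", "jdbc:postgresql://db").load()

spark.read.format.assert_called_with("jdbc")
# The bare format() below re-calls the mock with no args just to reach
# .option; that extra no-arg call is recorded but never asserted on.
spark.read.format().option.assert_called_with("url", "jdbc:postgresql://db")
spark.read.format().option().load.assert_called_once()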
| 313 | 14 | 6,148 | googlecloudplatform__dataproc-templates | d62560011b069690d01cf2db563788bf81029623 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_PARTITIONCOLUMN | true | statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[ …103 entries, verbatim identical to the jedi_completions list shown in full above… ] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants. | 
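Each prefix is the target file cut at the completion cursor, and ground_truth is the token the engine is scored on; the next row's longer prefix shows how this row's truncated line completes. A small sketch of that reconstruction (the tail and suffix strings are copied from the rows above and below, not invented):

# Row 313's prefix stops at the cursor, right after "constants.".
prefix_tail = ('mock_spark_session.read.format().option().option().option()'
               '.option.assert_called_with(constants.')
ground_truth = 'JDBC_PARTITIONCOLUMN'  # this row's ground_truth column
suffix = ', "column")'                 # visible in the next row's prefix
print(prefix_tail + ground_truth + suffix)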
| 314 | 14 | 6,293 | googlecloudplatform__dataproc-templates | d62560011b069690d01cf2db563788bf81029623 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_LOWERBOUND | true | statement | 103 | 103 | false | false | 
	[ …intellij_completions, verbatim identical to the previous row's list… ] | 
	[ …103 entries, verbatim identical to the jedi_completions list shown in full above… ] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants. | 
| 315 | 14 | 6,437 | googlecloudplatform__dataproc-templates | d62560011b069690d01cf2db563788bf81029623 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_UPPERBOUND | true | statement | 103 | 103 | false | false | 
	[ …intellij_completions, verbatim identical to the previous row's list… ] | 
	[ …103 entries, verbatim identical to the jedi_completions list shown in full above… ] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants. | 
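A minimal, self-contained sketch (illustrative, not part of the dataset) of the mock pattern that dominates the prefixes above: calling a MagicMock always returns the same memoized return_value, so a chain like read.format().option() names a stable object whose attributes record the calls made on it.

from unittest import mock

m = mock.MagicMock()
m.read.format("jdbc").option("url", "u").option("driver", "d")

# Assert on format first: the bare m.read.format() calls below also count as
# calls and would otherwise overwrite the "most recent call" being checked.
m.read.format.assert_called_with("jdbc")
m.read.format().option.assert_called_with("url", "u")
m.read.format().option().option.assert_called_with("driver", "d")
assert m.read.format() is m.read.format()  # return_value is memoized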
| 316 | 14 | 6,590 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_NUMPARTITIONS | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants. | 
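A hedged sketch of how a row like idx 316 above could be scored against its ground truth ("JDBC_NUMPARTITIONS"); the helper name ground_truth_rank and the truncated list are illustrative, not dataset fields.

def ground_truth_rank(completions, ground_truth):
    """0-based position of the expected identifier in a ranked completion list, or None."""
    return completions.index(ground_truth) if ground_truth in completions else None

# First entries of the intellij list from the row above; the full list does
# contain "JDBC_NUMPARTITIONS" further down, which is why in_completions is true.
head = ["FORMAT_JDBC", "JDBC_URL", "JDBC_TABLE", "JDBC_DRIVER", "FORMAT_AVRO"]
print(ground_truth_rank(head, "JDBC_URL"))            # 1
print(ground_truth_rank(head, "JDBC_NUMPARTITIONS"))  # None in this truncated head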
| 317 | 14 | 6,818 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	OUTPUT_MODE_APPEND | true | 
	statement | 103 | 103 | false | false | 
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants. | 
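A minimal runnable sketch of the @mock.patch.object pattern used by every test method in these prefixes; json.dumps stands in for pyspark.sql.SparkSession here only so the example runs without Spark installed.

import json
from unittest import mock

@mock.patch.object(json, "dumps")
def demo(mock_dumps):
    # Inside the decorated call, json.dumps is a MagicMock, passed in as an argument.
    json.dumps({"a": 1})
    mock_dumps.assert_called_once_with({"a": 1})

demo()
print(json.dumps({"a": 1}))  # the real function is restored after the call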
| 318 | 14 | 6,939 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	FORMAT_AVRO | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants. | 
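A sketch, under assumed field shapes mirroring the lists printed above, of how the in_completions flag for a row like idx 318 (ground truth "FORMAT_AVRO") could be derived: intellij completions are bare strings, jedi completions are name/type dicts.

jedi = [{"name": "FORMAT_AVRO", "type": "statement"},
        {"name": "FORMAT_JDBC", "type": "statement"}]
intellij = ["FORMAT_JDBC", "JDBC_URL", "FORMAT_AVRO"]

gt = "FORMAT_AVRO"
in_completions = gt in intellij and any(c["name"] == gt for c in jedi)
print(in_completions)  # True, matching the row's in_completions flag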
| 319 | 14 | 7,328 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	parse_args | true | 
	function | 4 | 4 | false | false | 
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template. | 
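Each row truncates its prefix at the completion cursor (here the trailing jdbc_to_gcs_template.) and records the expected member in the ground_truth column. A hypothetical scoring helper; the name in_completions mirrors the column, but this function is an assumption, not dataset code:

def in_completions(ground_truth, completions):
    # True when the expected member appears verbatim in the engine's list.
    return ground_truth in completions

# The row above expects parse_args among the four non-dunder suggestions.
assert in_completions("parse_args", ["parse_args", "run", "build", "get_logger"])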
| 320 | 14 | 8,026 | googlecloudplatform__dataproc-templates | d62560011b069690d01cf2db563788bf81029623 | python/test/jdbc/test_jdbc_to_gcs.py | common | run | true | function | 4 | 4 | false | false | 
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template. | 
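The prefixes all decorate their tests with mock.patch.object(pyspark.sql, 'SparkSession'): that swaps the attribute for a MagicMock for the duration of the test and hands the replacement in as the extra positional argument. A short sketch of just that mechanism (assumes only that pyspark is installed, as the prefixes themselves do):

from unittest import mock
import pyspark.sql

@mock.patch.object(pyspark.sql, "SparkSession")
def check_patch(mock_spark_session):
    # While patched, the module attribute *is* the injected mock.
    assert pyspark.sql.SparkSession is mock_spark_session

check_patch()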
| 321 | 14 | 8,136 | googlecloudplatform__dataproc-templates | d62560011b069690d01cf2db563788bf81029623 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | FORMAT_JDBC | true | statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants. | 
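The 103-entry completion lists in these rows are the public, non-dunder names of the constants module; something like the following reproduces the count reported in the non_dunder_count_* columns (assuming the dataproc-templates Python package is importable):

import dataproc_templates.util.template_constants as constants

non_dunder = [name for name in dir(constants) if not name.startswith("__")]
print(len(non_dunder))  # the rows above report 103 non-dunder completions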
| 322 | 14 | 8,226 | googlecloudplatform__dataproc-templates | d62560011b069690d01cf2db563788bf81029623 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_URL | true | statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_URL",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants. | 
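Taken together, the write-side assertions in these prefixes imply a per-format dispatch inside the template: parquet goes through the dedicated DataFrameWriter.parquet, while avro (and, by the same visible pattern, csv) goes through format(...).save(...). A hedged reconstruction of that dispatch, not the template's actual code:

def write_output(df, output_format, output_mode, location):
    writer = df.write.mode(output_mode)               # e.g. "append", "overwrite"
    if output_format == "parquet":
        writer.parquet(location)                      # mode().parquet("gs://test")
    else:
        writer.format(output_format).save(location)   # mode().format("avro").save(...)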
| 323 | 14 | 8,329 | googlecloudplatform__dataproc-templates | d62560011b069690d01cf2db563788bf81029623 | python/test/jdbc/test_jdbc_to_gcs.py | Unknown | JDBC_DRIVER | true | statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_DRIVER",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
]
jedi_completions:
[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
prefix:
"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
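        # Each chained read.format().option()... lookup above resolves to the
        # same per-depth child mock (a MagicMock reuses its return_value on
        # repeated calls), so every assert_called_with verifies one link of the
        # builder chain, checking the most recent call made inside run().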
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
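        # Avro has no dedicated DataFrameWriter method in stock Spark, so the
        # expected chain here is write.mode().format(FORMAT_AVRO).save()
        # rather than the write.mode().parquet() shortcut used above.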
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.

idx: 324
idx_lca: 14
offset: 8,447
repo: googlecloudplatform__dataproc-templates
commit_hash: d62560011b069690d01cf2db563788bf81029623
target_file: python/test/jdbc/test_jdbc_to_gcs.py
line_type_lca: Unknown
ground_truth: JDBC_TABLE
in_completions: true
completion_type: statement
non_dunder_count_intellij: 103
non_dunder_count_jedi: 103
start_with_: false
first_occurrence: false
intellij_completions:
[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_TABLE",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
]
jedi_completions:
[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
prefix:
"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
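        # A minimal sketch of the chain these assertions imply run() builds
        # (hypothetical reconstruction; variable names are illustrative and the
        # real implementation lives in JDBCToGCSTemplate.run):
        #
        #     input_data = spark.read.format(constants.FORMAT_JDBC) \
        #         .option(constants.JDBC_URL, url) \
        #         .option(constants.JDBC_DRIVER, driver) \
        #         .option(constants.JDBC_TABLE, table) \
        #         .option(constants.JDBC_PARTITIONCOLUMN, partition_column) \
        #         .option(constants.JDBC_LOWERBOUND, lower_bound) \
        #         .option(constants.JDBC_UPPERBOUND, upper_bound) \
        #         .option(constants.JDBC_NUMPARTITIONS, num_partitions) \
        #         .load()
        #     input_data.write.mode(constants.OUTPUT_MODE_OVERWRITE).parquet(location)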
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
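        # mock.patch.object(pyspark.sql, 'SparkSession') swaps the SparkSession
        # class for a MagicMock, and the test passes that mock straight to
        # run(), so no live Spark runtime is needed for these assertions.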
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.

idx: 325
idx_lca: 14
offset: 8,573
repo: googlecloudplatform__dataproc-templates
commit_hash: d62560011b069690d01cf2db563788bf81029623
target_file: python/test/jdbc/test_jdbc_to_gcs.py
line_type_lca: Unknown
ground_truth: JDBC_PARTITIONCOLUMN
in_completions: true
completion_type: statement
non_dunder_count_intellij: 103
non_dunder_count_jedi: 103
start_with_: false
first_occurrence: false
intellij_completions:
[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
]
jedi_completions:
[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
]
prefix:
"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
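        # write.mode can use assert_called_once_with because run() builds the
        # writer chain exactly once, while the reader options stick to
        # assert_called_with: the mock setup line above already invoked each
        # option() once without arguments when wiring load.return_value.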
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.

idx: 326
idx_lca: 14
offset: 8,718
repo: googlecloudplatform__dataproc-templates
commit_hash: d62560011b069690d01cf2db563788bf81029623
target_file: python/test/jdbc/test_jdbc_to_gcs.py
line_type_lca: Unknown
ground_truth: JDBC_LOWERBOUND
in_completions: true
completion_type: statement
non_dunder_count_intellij: 103
non_dunder_count_jedi: 103
start_with_: false
first_occurrence: false
intellij_completions:
[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
]
jedi_completions:
[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants. | 
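Note on the prefix above: the chained read.format().option()... asserts work because every call to a MagicMock attribute returns the same shared return_value, so each link in the dotted chain re-derives one stable child mock regardless of call arguments. A minimal, self-contained sketch of that behavior (standalone illustration; the names spark and reader are ours, not the dataset's):

from unittest.mock import MagicMock

spark = MagicMock()
# Exercise a chain shaped like the template's JDBC reader.
reader = spark.read.format("jdbc").option("url", "url").option("driver", "driver")
reader.load()

# Each attribute in the chain is one shared child mock, so the same dotted
# path re-derives it for the assertion. Assert format first: the no-arg
# format() calls in later assert lines append to format's call history.
spark.read.format.assert_called_with("jdbc")
spark.read.format().option.assert_called_with("url", "url")
spark.read.format().option().option.assert_called_with("driver", "driver")
spark.read.format().option().option().load.assert_called_once()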
| idx: 327 | idx_lca: 14 | offset: 8,862 | 
	repo: googlecloudplatform__dataproc-templates | 
	commit_hash: d62560011b069690d01cf2db563788bf81029623 | 
	target_file: python/test/jdbc/test_jdbc_to_gcs.py | 
	line_type_lca: Unknown | 
	ground_truth: JDBC_UPPERBOUND | in_completions: true | 
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants. | 
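For orientation: the test_parse_args1 asserts in these prefixes all treat the parsed result as a plain dict of strings keyed by the dotted flag name. The real implementation is JDBCToGCSTemplate.parse_args in the dataproc-templates repo; purely as a hypothetical illustration of that observable contract (the helper below and its splitting logic are assumptions, not the repo's code):

def parse_args(argv):
    """Map ["--key=value", ...] flags to {key: value}; every value stays a string."""
    parsed = {}
    for arg in argv:
        # Drop the leading dashes, then split on the first "=" only.
        key, _, value = arg.lstrip("-").partition("=")
        parsed[key] = value
    return parsed

args = parse_args(["--jdbctogcs.input.url=url", "--jdbctogcs.numpartitions=5"])
assert args["jdbctogcs.input.url"] == "url"
assert args["jdbctogcs.numpartitions"] == "5"  # a string, matching the test's asserts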
| idx: 328 | idx_lca: 14 | offset: 9,015 | 
	repo: googlecloudplatform__dataproc-templates | 
	commit_hash: d62560011b069690d01cf2db563788bf81029623 | 
	target_file: python/test/jdbc/test_jdbc_to_gcs.py | 
	line_type_lca: Unknown | 
	ground_truth: JDBC_NUMPARTITIONS | in_completions: true | 
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants. | 
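The csv prefix above is cut off exactly where this row's ground_truth (JDBC_NUMPARTITIONS) would complete it, so the writer-side asserts for the ignore-mode csv case are not shown. Judging from the parquet and avro tests earlier in the same file, they would plausibly follow the same mock pattern; a hedged sketch only (the df name and the exact writer chain are assumptions, not the repo's code):

from unittest.mock import MagicMock

df = MagicMock()
df.write.mode("ignore").csv("gs://test")  # how a csv branch could write

# Assert mode first: the no-arg mode() in the second line adds a call.
df.write.mode.assert_called_once_with("ignore")
df.write.mode().csv.assert_called_once_with("gs://test")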
| idx: 329 | idx_lca: 14 | offset: 9,243 | 
	repo: googlecloudplatform__dataproc-templates | 
	commit_hash: d62560011b069690d01cf2db563788bf81029623 | 
	target_file: python/test/jdbc/test_jdbc_to_gcs.py | 
	line_type_lca: Unknown | 
	ground_truth: OUTPUT_MODE_IGNORE | in_completions: true | 
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false | 
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants. | 
| 330 | 14 | 9758 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	inproject | 
	parse_args | true | 
	function | 4 | 4 | false | false | 
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template. | 
| 331 | 14 | 10457 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	run | true | 
	function | 4 | 4 | false | false | 
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template. | 
| 332 | 14 | 10567 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	FORMAT_JDBC | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants. | 
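An aside on why these chained assertions work at all: `unittest.mock.MagicMock` caches a single `return_value` per call site, so `read.format()` invoked with any arguments (or none) always resolves to the same child mock, and each successive `.option` along the chain is its own distinct mock. A minimal, self-contained sketch (illustrative only, not part of the dataset rows):

```python
from unittest.mock import MagicMock

read = MagicMock()
# Exercise the chain once, the way the template's run() would.
read.format("jdbc").option("url", "url").option("driver", "driver").load()

# format() with no args resolves to the same cached child mock, so the
# per-link assertions can replay the chain after the fact.
read.format.assert_called_with("jdbc")
read.format().option.assert_called_with("url", "url")
read.format().option().option.assert_called_with("driver", "driver")
read.format().option().option().load.assert_called_once_with()
```

Note that `assert_called_with` checks only the most recent call, which is why the tests assert the shallow links before deeper assertion lines re-invoke them with no arguments.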
| 333 | 14 | 10,657 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_URL | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_URL",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants. | 
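The `test_parse_args1` case in each prefix exercises a parser that maps dotted CLI flags to string values in a plain dict. A hedged sketch of that shape (the real `JDBCToGCSTemplate` may build its parser differently; the function and option subset here are illustrative assumptions):

```python
import argparse
from typing import Dict, List

def parse_template_args(args: List[str]) -> Dict[str, str]:
    """Map --dotted.option=value flags to a {dotted.option: value} dict."""
    parser = argparse.ArgumentParser()
    for key in ("jdbctogcs.input.url", "jdbctogcs.input.driver",
                "jdbctogcs.input.table", "jdbctogcs.output.location",
                "jdbctogcs.output.format", "jdbctogcs.output.mode"):
        # An explicit dest keeps the dotted key intact in the namespace.
        parser.add_argument(f"--{key}", dest=key, required=False)
    known, _unknown = parser.parse_known_args(args)
    return vars(known)

assert parse_template_args(["--jdbctogcs.input.url=url"])["jdbctogcs.input.url"] == "url"
```

This matches the assertion style in the tests, where every parsed value (including bounds and partition counts) comes back as a string.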
| 334 | 14 | 10,760 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_DRIVER | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_NUMPARTITIONS",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_DRIVER",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants. | 
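Taken together, the parquet/avro/csv/json cases pin down a writer dispatch: `mode()` first, then a format-specific call. A hedged reconstruction of that branch (the template's actual code may differ; the literal `"header"` key stands in for whatever header constant the repo defines):

```python
from pyspark.sql import DataFrame

def write_output(df: DataFrame, fmt: str, mode: str, location: str) -> None:
    """Dispatch on output format, mirroring the call chains the tests assert."""
    writer = df.write.mode(mode)
    if fmt == "parquet":
        writer.parquet(location)                      # write.mode().parquet(...)
    elif fmt == "avro":
        writer.format("avro").save(location)          # write.mode().format().save(...)
    elif fmt == "csv":
        writer.option("header", True).csv(location)   # write.mode().option().csv(...)
    elif fmt == "json":
        writer.json(location)
    else:
        raise ValueError(f"Unsupported output format: {fmt}")
```

Each branch corresponds one-to-one with a `test_run_pass_args*` case above.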
| 335 | 14 | 10,878 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_TABLE | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "JDBC_TABLE",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants. | 
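Each row opens with its scalar fields as one pipe-delimited record, labeled key: value in the cleaned records below. A minimal sketch, assuming that labeled layout, of turning such a record into a dict; parse_scalar_record is an illustrative helper, not dataset tooling:

# Parse one labeled scalar record from this dump into a dict.
# The thousands separator in offset ("11,004") and the true/false strings
# are normalized; both cleanup rules are assumptions for illustration.
def parse_scalar_record(record):
    row = {}
    for cell in record.replace("\n", " ").strip().strip("|").split("|"):
        key, _, value = cell.strip().partition(":")
        row[key.strip()] = value.strip()
    for key in ("idx", "idx_lca", "offset",
                "non_dunder_count_intellij", "non_dunder_count_jedi"):
        row[key] = int(row[key].replace(",", ""))
    for key in ("in_completions", "start_with_", "first_occurrence"):
        row[key] = row[key] == "true"
    return row

record = ("| idx: 336 | idx_lca: 14 | offset: 11,004 | "
          "ground_truth: JDBC_PARTITIONCOLUMN | in_completions: true | "
          "completion_type: statement | non_dunder_count_intellij: 103 | "
          "non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false |")
parsed = parse_scalar_record(record)
assert parsed["offset"] == 11004 and parsed["in_completions"] is True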
| idx: 336 | idx_lca: 14 | offset: 11,004 |
	repo: googlecloudplatform__dataproc-templates |
	commit_hash: d62560011b069690d01cf2db563788bf81029623 |
	target_file: python/test/jdbc/test_jdbc_to_gcs.py |
	line_type_lca: Unknown |
	ground_truth: JDBC_PARTITIONCOLUMN | in_completions: true |
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants. | 
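Row 336, whose record appears above, cuts its prefix at constants. exactly where the ground truth JDBC_PARTITIONCOLUMN belongs, and in_completions: true records that the token occurs in both engines' lists. A minimal sketch of the rank lookup such a row supports; rank_of is an illustrative helper and both lists are abridged from the ones above:

# 1-based rank of the ground-truth token in a completion list, None if absent.
def rank_of(ground_truth, completions):
    for rank, name in enumerate(completions, start=1):
        if name == ground_truth:
            return rank
    return None

intellij = ["FORMAT_JDBC", "JDBC_URL", "JDBC_TABLE", "JDBC_DRIVER",
            "JDBC_NUMPARTITIONS", "JDBC_PARTITIONCOLUMN"]  # abridged
jedi = [c["name"] for c in [
    {"name": "JDBC_NUMPARTITIONS", "type": "statement"},
    {"name": "JDBC_PARTITIONCOLUMN", "type": "statement"},
]]  # abridged
print(rank_of("JDBC_PARTITIONCOLUMN", intellij),  # 6 in the abridged list
      rank_of("JDBC_PARTITIONCOLUMN", jedi))      # 2 in the abridged list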
| idx: 337 | idx_lca: 14 | offset: 11,149 |
	repo: googlecloudplatform__dataproc-templates |
	commit_hash: d62560011b069690d01cf2db563788bf81029623 |
	target_file: python/test/jdbc/test_jdbc_to_gcs.py |
	line_type_lca: Unknown |
	ground_truth: JDBC_LOWERBOUND | in_completions: true |
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants. | 
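Because each prefix stops exactly at the completion point, appending the row's ground_truth reconstructs the identifier the engine was asked to produce. A sketch for row 337, whose record appears above; the trailing ', "1")' continuation is inferred from the matching assertions in the earlier test methods of the same file, not stored in the row:

# Rebuild the completed source line for row 337 from prefix tail + ground truth.
prefix_tail = ('mock_spark_session.read.format().option().option().option()'
               '.option().option.assert_called_with(constants.')
ground_truth = "JDBC_LOWERBOUND"
completed = prefix_tail + ground_truth + ', "1")'  # continuation is an inference
print(completed)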
| idx: 338 | idx_lca: 14 | offset: 11,293 |
	repo: googlecloudplatform__dataproc-templates |
	commit_hash: d62560011b069690d01cf2db563788bf81029623 |
	target_file: python/test/jdbc/test_jdbc_to_gcs.py |
	line_type_lca: Unknown |
	ground_truth: JDBC_UPPERBOUND | in_completions: true |
	completion_type: statement | non_dunder_count_intellij: 103 | non_dunder_count_jedi: 103 | start_with_: false | first_occurrence: false |
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants. | 
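Worth spelling out, since every test in these prefixes leans on it: with unittest.mock (the stdlib equivalent of the `mock` package imported above), calling a child mock returns the same `return_value` regardless of arguments, so each depth of the `read.format().option().option()...` chain is one distinct mock object, and `assert_called_with` at a given depth checks only the most recent call made on it. A minimal runnable sketch, assuming nothing beyond the standard library (`spark` and the URL below are illustrative names, not dataset values):

from unittest import mock

spark = mock.MagicMock()

# Exercise the mock the way a template's run() would: one chained read.
spark.read.format("jdbc").option("url", "jdbc:h2:mem:demo").load()

# format() returns the same child mock on every call, so these chains reach
# the exact objects exercised above, one depth at a time.
spark.read.format.assert_called_with("jdbc")
spark.read.format().option.assert_called_with("url", "jdbc:h2:mem:demo")
spark.read.format().option().load.assert_called_once()

The same quirk explains the bare `...option().load()` lines in the tests: they re-invoke `load()` on the chain rather than asserting anything about it.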
| 339 | 14 | 11,446 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	JDBC_NUMPARTITIONS | true | 
	statement | 103 | 103 | false | false | 
	[
  "FORMAT_JDBC",
  "JDBC_URL",
  "JDBC_TABLE",
  "JDBC_DRIVER",
  "FORMAT_AVRO",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_NUMPARTITIONS",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants. | 
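Each row above pairs a `prefix` that stops exactly at the completion cursor (here, just after `constants.`) with the `ground_truth` token and each engine's ranked completion list; `in_completions` records whether the token occurs in that list at all. A hypothetical scoring helper under that reading of the schema (`hit_rank` is an illustrative name, not part of the dataset's tooling):

from typing import List, Optional

def hit_rank(ground_truth: str, completions: List[str], k: int = 5) -> Optional[int]:
    """0-based rank of ground_truth within the top-k completions, else None."""
    top_k = completions[:k]
    return top_k.index(ground_truth) if ground_truth in top_k else None

# Illustrative list only; in the row above (idx 339), "JDBC_NUMPARTITIONS"
# appears in the full 103-item IntelliJ list but not in its first five entries.
completions = ["FORMAT_JDBC", "JDBC_URL", "JDBC_NUMPARTITIONS", "JDBC_TABLE"]
print(hit_rank("JDBC_NUMPARTITIONS", completions))       # 2
print(hit_rank("JDBC_NUMPARTITIONS", completions, k=2))  # None

For row idx 339 this captures exactly the distinction the fields encode: `in_completions` is true, yet the token sits outside the apparently relevance-sorted head of the IntelliJ list, so a strict top-5 metric would still score it as a miss.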
| 340 | 14 | 11,674 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	OUTPUT_MODE_IGNORE | true | 
	statement | 103 | 103 | false | false | 
	[
  "JDBC_URL",
  "JDBC_TABLE",
  "FORMAT_JDBC",
  "JDBC_DRIVER",
  "JDBC_NUMPARTITIONS",
  "BQ_GCS_INPUT_TABLE",
  "BQ_GCS_OUTPUT_FORMAT",
  "BQ_GCS_OUTPUT_LOCATION",
  "BQ_GCS_OUTPUT_MODE",
  "COMPRESSION_BZIP2",
  "COMPRESSION_DEFLATE",
  "COMPRESSION_GZIP",
  "COMPRESSION_LZ4",
  "COMPRESSION_NONE",
  "FORMAT_AVRO",
  "FORMAT_AVRO_EXTD",
  "FORMAT_BIGQUERY",
  "FORMAT_CSV",
  "FORMAT_HBASE",
  "FORMAT_JSON",
  "FORMAT_PRQT",
  "FORMAT_TXT",
  "GCS_BQ_INPUT_FORMAT",
  "GCS_BQ_INPUT_LOCATION",
  "GCS_BQ_LD_TEMP_BUCKET_NAME",
  "GCS_BQ_OUTPUT_DATASET",
  "GCS_BQ_OUTPUT_MODE",
  "GCS_BQ_OUTPUT_TABLE",
  "GCS_BQ_TEMP_BUCKET",
  "GCS_BT_HBASE_CATALOG_JSON",
  "GCS_BT_INPUT_FORMAT",
  "GCS_BT_INPUT_LOCATION",
  "GCS_JDBC_BATCH_SIZE",
  "GCS_JDBC_INPUT_FORMAT",
  "GCS_JDBC_INPUT_LOCATION",
  "GCS_JDBC_OUTPUT_DRIVER",
  "GCS_JDBC_OUTPUT_MODE",
  "GCS_JDBC_OUTPUT_TABLE",
  "GCS_JDBC_OUTPUT_URL",
  "HBASE_GCS_CATALOG_JSON",
  "HBASE_GCS_OUTPUT_FORMAT",
  "HBASE_GCS_OUTPUT_LOCATION",
  "HBASE_GCS_OUTPUT_MODE",
  "HEADER",
  "HIVE_BQ_INPUT_DATABASE",
  "HIVE_BQ_INPUT_TABLE",
  "HIVE_BQ_LD_TEMP_BUCKET_NAME",
  "HIVE_BQ_OUTPUT_DATASET",
  "HIVE_BQ_OUTPUT_MODE",
  "HIVE_BQ_OUTPUT_TABLE",
  "HIVE_GCS_INPUT_DATABASE",
  "HIVE_GCS_INPUT_TABLE",
  "HIVE_GCS_OUTPUT_FORMAT",
  "HIVE_GCS_OUTPUT_LOCATION",
  "HIVE_GCS_OUTPUT_MODE",
  "INFER_SCHEMA",
  "INPUT_COMPRESSION",
  "INPUT_DELIMITER",
  "JDBC_BATCH_SIZE",
  "JDBC_CREATE_TABLE_OPTIONS",
  "JDBC_LOWERBOUND",
  "JDBC_PARTITIONCOLUMN",
  "JDBC_UPPERBOUND",
  "JDBCTOGCS_INPUT_DRIVER",
  "JDBCTOGCS_INPUT_LOWERBOUND",
  "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
  "JDBCTOGCS_INPUT_TABLE",
  "JDBCTOGCS_INPUT_UPPERBOUND",
  "JDBCTOGCS_INPUT_URL",
  "JDBCTOGCS_NUMPARTITIONS",
  "JDBCTOGCS_OUTPUT_FORMAT",
  "JDBCTOGCS_OUTPUT_LOCATION",
  "JDBCTOGCS_OUTPUT_MODE",
  "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_DRIVER",
  "JDBCTOJDBC_INPUT_LOWERBOUND",
  "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
  "JDBCTOJDBC_INPUT_TABLE",
  "JDBCTOJDBC_INPUT_UPPERBOUND",
  "JDBCTOJDBC_INPUT_URL",
  "JDBCTOJDBC_NUMPARTITIONS",
  "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
  "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
  "JDBCTOJDBC_OUTPUT_DRIVER",
  "JDBCTOJDBC_OUTPUT_MODE",
  "JDBCTOJDBC_OUTPUT_TABLE",
  "JDBCTOJDBC_OUTPUT_URL",
  "OUTPUT_MODE_APPEND",
  "OUTPUT_MODE_ERRORIFEXISTS",
  "OUTPUT_MODE_IGNORE",
  "OUTPUT_MODE_OVERWRITE",
  "PROJECT_ID_PROP",
  "TABLE",
  "TEMP_GCS_BUCKET",
  "TEXT_BQ_INPUT_INFERSCHEMA",
  "TEXT_BQ_INPUT_LOCATION",
  "TEXT_BQ_LD_TEMP_BUCKET_NAME",
  "TEXT_BQ_OUTPUT_DATASET",
  "TEXT_BQ_OUTPUT_MODE",
  "TEXT_BQ_OUTPUT_TABLE",
  "TEXT_BQ_TEMP_BUCKET",
  "TEXT_INPUT_COMPRESSION",
  "TEXT_INPUT_DELIMITER"
] | 
	[
  {
    "name": "BQ_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "BQ_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_BZIP2",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_DEFLATE",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_GZIP",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_LZ4",
    "type": "statement"
  },
  {
    "name": "COMPRESSION_NONE",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO",
    "type": "statement"
  },
  {
    "name": "FORMAT_AVRO_EXTD",
    "type": "statement"
  },
  {
    "name": "FORMAT_BIGQUERY",
    "type": "statement"
  },
  {
    "name": "FORMAT_CSV",
    "type": "statement"
  },
  {
    "name": "FORMAT_HBASE",
    "type": "statement"
  },
  {
    "name": "FORMAT_JDBC",
    "type": "statement"
  },
  {
    "name": "FORMAT_JSON",
    "type": "statement"
  },
  {
    "name": "FORMAT_PRQT",
    "type": "statement"
  },
  {
    "name": "FORMAT_TXT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "GCS_BT_HBASE_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_BT_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "GCS_JDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_CATALOG_JSON",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HBASE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HEADER",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "HIVE_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_DATABASE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "HIVE_GCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "INFER_SCHEMA",
    "type": "statement"
  },
  {
    "name": "INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "JDBC_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBC_CREATE_TABLE_OPTIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBC_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBC_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBC_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBC_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBC_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_FORMAT",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOGCS_OUTPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_LOWERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_PARTITIONCOLUMN",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_UPPERBOUND",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_INPUT_URL",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_NUMPARTITIONS",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_BATCH_SIZE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_CREATE_TABLE_OPTION",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_DRIVER",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "JDBCTOJDBC_OUTPUT_URL",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_APPEND",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_ERRORIFEXISTS",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_IGNORE",
    "type": "statement"
  },
  {
    "name": "OUTPUT_MODE_OVERWRITE",
    "type": "statement"
  },
  {
    "name": "PROJECT_ID_PROP",
    "type": "statement"
  },
  {
    "name": "TABLE",
    "type": "statement"
  },
  {
    "name": "TEMP_GCS_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_INFERSCHEMA",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_INPUT_LOCATION",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_LD_TEMP_BUCKET_NAME",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_DATASET",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_MODE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_OUTPUT_TABLE",
    "type": "statement"
  },
  {
    "name": "TEXT_BQ_TEMP_BUCKET",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_COMPRESSION",
    "type": "statement"
  },
  {
    "name": "TEXT_INPUT_DELIMITER",
    "type": "statement"
  },
  {
    "name": "__doc__",
    "type": "instance"
  },
  {
    "name": "__file__",
    "type": "instance"
  },
  {
    "name": "__name__",
    "type": "instance"
  },
  {
    "name": "__package__",
    "type": "instance"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])

        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants. | 
| 341 | 14 | 12,062 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	parse_args | true | 
	function | 4 | 4 | false | false | 
	[
  "parse_args",
  "run",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template. | 
| 342 | 14 | 12,538 | 
	googlecloudplatform__dataproc-templates | 
	d62560011b069690d01cf2db563788bf81029623 | 
	python/test/jdbc/test_jdbc_to_gcs.py | 
	Unknown | 
	run | true | 
	function | 4 | 4 | false | false | 
	[
  "run",
  "parse_args",
  "build",
  "get_logger",
  "__annotations__",
  "__class__",
  "__delattr__",
  "__dict__",
  "__dir__",
  "__eq__",
  "__format__",
  "__getattribute__",
  "__hash__",
  "__init__",
  "__init_subclass__",
  "__ne__",
  "__new__",
  "__reduce__",
  "__reduce_ex__",
  "__repr__",
  "__setattr__",
  "__sizeof__",
  "__slots__",
  "__str__",
  "__subclasshook__",
  "__doc__",
  "__module__"
] | 
	[
  {
    "name": "build",
    "type": "function"
  },
  {
    "name": "get_logger",
    "type": "function"
  },
  {
    "name": "parse_args",
    "type": "function"
  },
  {
    "name": "run",
    "type": "function"
  },
  {
    "name": "__annotations__",
    "type": "statement"
  },
  {
    "name": "__class__",
    "type": "property"
  },
  {
    "name": "__delattr__",
    "type": "function"
  },
  {
    "name": "__dict__",
    "type": "statement"
  },
  {
    "name": "__dir__",
    "type": "function"
  },
  {
    "name": "__doc__",
    "type": "statement"
  },
  {
    "name": "__eq__",
    "type": "function"
  },
  {
    "name": "__format__",
    "type": "function"
  },
  {
    "name": "__getattribute__",
    "type": "function"
  },
  {
    "name": "__hash__",
    "type": "function"
  },
  {
    "name": "__init__",
    "type": "function"
  },
  {
    "name": "__init_subclass__",
    "type": "function"
  },
  {
    "name": "__module__",
    "type": "statement"
  },
  {
    "name": "__ne__",
    "type": "function"
  },
  {
    "name": "__new__",
    "type": "function"
  },
  {
    "name": "__reduce__",
    "type": "function"
  },
  {
    "name": "__reduce_ex__",
    "type": "function"
  },
  {
    "name": "__repr__",
    "type": "function"
  },
  {
    "name": "__setattr__",
    "type": "function"
  },
  {
    "name": "__sizeof__",
    "type": "function"
  },
  {
    "name": "__slots__",
    "type": "statement"
  },
  {
    "name": "__str__",
    "type": "function"
  }
] | 
	"""
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""
import mock
import pyspark
from dataproc_templates.jdbc.jdbc_to_gcs import JDBCToGCSTemplate
import dataproc_templates.util.template_constants as constants
class TestJDBCToGCSTemplate:
    """
    Test suite for JDBCToGCSTemplate
    """
    def test_parse_args1(self):
        """Tests JDBCToGCSTemplate.parse_args()"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append",
             "--jdbctogcs.output.partitioncolumn=column"
             ])       
        
        assert parsed_args["jdbctogcs.input.url"] == "url"
        assert parsed_args["jdbctogcs.input.driver"] == "driver"
        assert parsed_args["jdbctogcs.input.table"] == "table1"
        assert parsed_args["jdbctogcs.input.partitioncolumn"] == "column"
        assert parsed_args["jdbctogcs.input.lowerbound"] == "1"
        assert parsed_args["jdbctogcs.input.upperbound"] == "2"
        assert parsed_args["jdbctogcs.numpartitions"] == "5"
        assert parsed_args["jdbctogcs.output.location"] == "gs://test"
        assert parsed_args["jdbctogcs.output.format"] == "csv"
        assert parsed_args["jdbctogcs.output.mode"] == "append"  
        assert parsed_args["jdbctogcs.output.partitioncolumn"] == "column"      
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args2(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write parquet"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=parquet",
             "--jdbctogcs.output.mode=overwrite"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_OVERWRITE)
        mock_spark_session.dataframe.DataFrame.write.mode().parquet.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args3(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write avro"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=avro",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_APPEND)
        mock_spark_session.dataframe.DataFrame.write.mode().format.assert_called_once_with(constants.FORMAT_AVRO)
        mock_spark_session.dataframe.DataFrame.write.mode().format().save.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args4(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write csv"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        mock_spark_session.dataframe.DataFrame.write.mode().option.assert_called_once_with(constants.CSV_HEADER, True)
        mock_spark_session.dataframe.DataFrame.write.mode().option().csv.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args5(self, mock_spark_session):
        """Tests JDBCToGCSTemplate write json"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.input.partitioncolumn=column",
             "--jdbctogcs.input.lowerbound=1",
             "--jdbctogcs.input.upperbound=2",
             "--jdbctogcs.numpartitions=5",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=json",
             "--jdbctogcs.output.mode=ignore"
             ])
        mock_spark_session.read.format().option().option().option().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template.run(mock_spark_session, mock_parsed_args)
        mock_spark_session.read.format.assert_called_with(constants.FORMAT_JDBC)
        mock_spark_session.read.format().option.assert_called_with(constants.JDBC_URL, "url")
        mock_spark_session.read.format().option().option.assert_called_with(constants.JDBC_DRIVER, "driver")
        mock_spark_session.read.format().option().option().option.assert_called_with(constants.JDBC_TABLE, "table1")
        mock_spark_session.read.format().option().option().option().option.assert_called_with(constants.JDBC_PARTITIONCOLUMN, "column")
        mock_spark_session.read.format().option().option().option().option().option.assert_called_with(constants.JDBC_LOWERBOUND, "1")
        mock_spark_session.read.format().option().option().option().option().option().option.assert_called_with(constants.JDBC_UPPERBOUND, "2")
        mock_spark_session.read.format().option().option().option().option().option().option().option.assert_called_with(constants.JDBC_NUMPARTITIONS, "5")
        mock_spark_session.read.format().option().option().option().option().option().option().option().load()
        mock_spark_session.dataframe.DataFrame.write.mode.assert_called_once_with(constants.OUTPUT_MODE_IGNORE)
        #mock_spark_session.dataframe.DataFrame.write.mode().json.assert_called_once_with("gs://test")
        
    @mock.patch.object(pyspark.sql, 'SparkSession')
    def test_run_pass_args6(self, mock_spark_session):
        """Tests JDBCToGCSTemplate pass args"""
        jdbc_to_gcs_template = JDBCToGCSTemplate()
        mock_parsed_args = jdbc_to_gcs_template.parse_args(
            ["--jdbctogcs.input.url=url",
             "--jdbctogcs.input.driver=driver",
             "--jdbctogcs.input.table=table1",
             "--jdbctogcs.output.location=gs://test",
             "--jdbctogcs.output.format=csv",
             "--jdbctogcs.output.mode=append"
             ])
        mock_spark_session.read.format().option().option().option().option().load.return_value = mock_spark_session.dataframe.DataFrame
        jdbc_to_gcs_template. | 