<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: validating record count at SQL server database tables with migrated azure data lake gen2 in Get Started Discussions</title>
    <link>https://community.databricks.com/t5/get-started-discussions/validating-record-count-at-sql-server-database-tabbles-with/m-p/49312#M6007</link>
    <description>&lt;P&gt;However much I tried doing that, I'm using my local system for checking and there are no firewalls or any security blocks. This is the error message that I keep receiving and am unable to fix:&amp;nbsp;&lt;BR /&gt;&lt;SPAN&gt;com.microsoft.sqlserver.jdbc.SQLServerException: The TCP/IP connection to the host SATHYA, port 1433 has failed. Error: "SATHYA. Verify the connection properties. Make sure that an instance of SQL Server is running on the host and accepting TCP/IP connections at the port. Make sure that TCP connections to the port are not blocked by a firewall.".&lt;/SPAN&gt;&lt;/P&gt;</description>
    <pubDate>Mon, 16 Oct 2023 16:57:06 GMT</pubDate>
    <dc:creator>sai_sathya</dc:creator>
    <dc:date>2023-10-16T16:57:06Z</dc:date>
    <item>
      <title>validating record count at SQL server database tables with migrated azure data lake gen2</title>
      <link>https://community.databricks.com/t5/get-started-discussions/validating-record-count-at-sql-server-database-tabbles-with/m-p/46754#M6003</link>
      <description>&lt;P&gt;We are migrating our project from on-premise to Azure, so the on-premise&amp;nbsp;database is the SQL Server database that we are using, and Azure Data Lake Gen2 is the storage location where we currently store data. So far we validate the record count of each table manually from the SQL Server database tables, and similarly we write PySpark code in Databricks to write those data as Parquet files and validate the record count from PySpark manually every time,&amp;nbsp;which is time consuming.&lt;/P&gt;&lt;P&gt;Is it possible to automate this process in order to save time?&lt;/P&gt;&lt;P&gt;Can this be done using PySpark code, or is there any other solution?&lt;/P&gt;</description>
      <pubDate>Fri, 29 Sep 2023 10:29:22 GMT</pubDate>
      <guid>https://community.databricks.com/t5/get-started-discussions/validating-record-count-at-sql-server-database-tabbles-with/m-p/46754#M6003</guid>
      <dc:creator>sai_sathya</dc:creator>
      <dc:date>2023-09-29T10:29:22Z</dc:date>
    </item>
    <item>
      <title>Re: validating record count at SQL server database tables with migrated azure data lake gen2</title>
      <link>https://community.databricks.com/t5/get-started-discussions/validating-record-count-at-sql-server-database-tabbles-with/m-p/49312#M6007</link>
      <description>&lt;P&gt;However much I tried doing that, I'm using my local system for checking and there are no firewalls or any security blocks. This is the error message that I keep receiving and am unable to fix:&amp;nbsp;&lt;BR /&gt;&lt;SPAN&gt;com.microsoft.sqlserver.jdbc.SQLServerException: The TCP/IP connection to the host SATHYA, port 1433 has failed. Error: "SATHYA. Verify the connection properties. Make sure that an instance of SQL Server is running on the host and accepting TCP/IP connections at the port. Make sure that TCP connections to the port are not blocked by a firewall.".&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 16 Oct 2023 16:57:06 GMT</pubDate>
      <guid>https://community.databricks.com/t5/get-started-discussions/validating-record-count-at-sql-server-database-tabbles-with/m-p/49312#M6007</guid>
      <dc:creator>sai_sathya</dc:creator>
      <dc:date>2023-10-16T16:57:06Z</dc:date>
    </item>
  </channel>
</rss>

