Inicializar repositório

This commit is contained in:
Adriano Serighelli 2025-03-13 17:05:36 -03:00
commit 5b4882f638
5 changed files with 916 additions and 0 deletions

63
.gitattributes vendored Normal file
View File

@ -0,0 +1,63 @@
###############################################################################
# Set default behavior to automatically normalize line endings.
###############################################################################
* text=auto
###############################################################################
# Set default behavior for command prompt diff.
#
# This is needed for earlier builds of msysgit that do not have it on by
# default for csharp files.
# Note: This is only used by command line
###############################################################################
#*.cs diff=csharp
###############################################################################
# Set the merge driver for project and solution files
#
# Merging from the command prompt will add diff markers to the files if there
# are conflicts (Merging from VS is not affected by the settings below, in VS
# the diff markers are never inserted). Diff markers may cause the following
# file extensions to fail to load in VS. An alternative would be to treat
# these files as binary and thus will always conflict and require user
# intervention with every merge. To do so, just uncomment the entries below
###############################################################################
#*.sln merge=binary
#*.csproj merge=binary
#*.vbproj merge=binary
#*.vcxproj merge=binary
#*.vcproj merge=binary
#*.dbproj merge=binary
#*.fsproj merge=binary
#*.lsproj merge=binary
#*.wixproj merge=binary
#*.modelproj merge=binary
#*.sqlproj merge=binary
#*.wwaproj merge=binary
###############################################################################
# behavior for image files
#
# image files are treated as binary by default.
###############################################################################
#*.jpg binary
#*.png binary
#*.gif binary
###############################################################################
# diff behavior for common document formats
#
# Convert binary document formats to text before diffing them. This feature
# is only available from the command line. Turn it on by uncommenting the
# entries below.
###############################################################################
#*.doc diff=astextplain
#*.DOC diff=astextplain
#*.docx diff=astextplain
#*.DOCX diff=astextplain
#*.dot diff=astextplain
#*.DOT diff=astextplain
#*.pdf diff=astextplain
#*.PDF diff=astextplain
#*.rtf diff=astextplain
#*.RTF diff=astextplain

363
.gitignore vendored Normal file
View File

@ -0,0 +1,363 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd

29
PI_Assync_PLD.csproj Normal file
View File

@ -0,0 +1,29 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <!-- Windows-only console executable on .NET 6 (the app uses OleDb/Access and
         a Windows COM reference below). -->
    <OutputType>Exe</OutputType>
    <TargetFramework>net6.0-windows</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <PlatformTarget>AnyCPU</PlatformTarget>
  </PropertyGroup>
  <ItemGroup>
    <!-- COM interop wrapper for the Windows WinHTTP 5.1 type library. -->
    <COMReference Include="WinHttp">
      <WrapperTool>tlbimp</WrapperTool>
      <VersionMinor>1</VersionMinor>
      <VersionMajor>5</VersionMajor>
      <Guid>662901fc-6951-4854-9eb2-d9a2570f2b2e</Guid>
      <Lcid>0</Lcid>
      <Isolated>false</Isolated>
      <EmbedInteropTypes>true</EmbedInteropTypes>
    </COMReference>
  </ItemGroup>
  <ItemGroup>
    <!-- JSON helpers, PostgreSQL driver, and the OleDb provider used for the
         Access database (see Program.cs). -->
    <PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
    <PackageReference Include="Npgsql" Version="6.0.5" />
    <PackageReference Include="System.Data.OleDb" Version="6.0.0" />
  </ItemGroup>
</Project>

25
PI_Assync_PLD.sln Normal file
View File

@ -0,0 +1,25 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.1.32319.34
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PI_Assync_PLD", "PI_Assync_PLD.csproj", "{79AD0C7C-4BAD-4E76-A64D-32224198BBC7}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{79AD0C7C-4BAD-4E76-A64D-32224198BBC7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{79AD0C7C-4BAD-4E76-A64D-32224198BBC7}.Debug|Any CPU.Build.0 = Debug|Any CPU
{79AD0C7C-4BAD-4E76-A64D-32224198BBC7}.Release|Any CPU.ActiveCfg = Release|Any CPU
{79AD0C7C-4BAD-4E76-A64D-32224198BBC7}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {F525B50C-5009-4870-9C74-37BF8C691FFC}
EndGlobalSection
EndGlobal

436
Program.cs Normal file
View File

@ -0,0 +1,436 @@
// See https://aka.ms/new-console-template for more information
using Newtonsoft.Json;
using System;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.OleDb;
using System.Data.SqlClient;
using System.Diagnostics;
using System.Drawing;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Security.Authentication;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Xml;
using Npgsql;
// Downloads hourly PLD (Brazilian energy spot price) data from the CCEE
// "listarPLD" SOAP service and inserts it into either a local Access database
// or a PostgreSQL database, depending on the BUILD_FOR flag.
internal class Plat_integ
{
    // Average execution time for synchronous processing: 15 days: 5.4s / 30 days: 10.0s / 60 days: 16.2s

    // SECURITY NOTE(review): database credentials (and, further down, a client
    // certificate password) are hard-coded and committed to source control;
    // they should be moved to configuration / a secret store and rotated.
    static string PG_CONN_STRING_DEV = "Server = smart-energia-dev-pgsql.cykff7tj7mik.us-east-1.rds.amazonaws.com; Port = 5432; Database = smartimptest; User Id = postgres; Password = VfHml#Z78!%kvvNM;";
    static string PG_CONN_STRING_PROD = "Server = smart-energia-dev-pgsql.cykff7tj7mik.us-east-1.rds.amazonaws.com; Port = 5432; Database = smartenergiaprod; User Id = postgres; Password = VfHml#Z78!%kvvNM;";
    // Selects which PostgreSQL connection string is used.
    static string ENVIRONMENT = "prod"; // "dev" | "prod"
    // Selects the storage backend used by the processing routines.
    static string BUILD_FOR = "access"; // "access" | "postgres"

    // Entry point. NOTE(review): .Wait() is sync-over-async and wraps failures
    // in AggregateException; `static async Task Main()` would avoid both.
    static void Main()
    {
        MainAsync().Wait();
    }

    // Orchestrates the import: derives the start date from the last value
    // already stored, fetches page 1 synchronously to learn the total page
    // count, then processes the remaining pages — sequentially for PostgreSQL,
    // in parallel for Access. Always returns 0.
    private static async Task<int> MainAsync()
    {
        string caminho_BD;
        int max_i;
        DateTime data_inicial;
        DateTime tempo_cod, tempo_ini;
        Thread.CurrentThread.CurrentCulture = CultureInfo.CreateSpecificCulture("en-GB");
        // Timers used to report elapsed time per stage.
        tempo_ini = DateTime.Now;
        tempo_cod = DateTime.Now;
        // Path of the Access database where the PLD values are saved.
        caminho_BD = @"X:\Middle\Informativo Setorial\Modelo Word\BD_SCDE.accdb";
        // Start date based on the last value saved in the database.
        data_inicial = data_inicio_BD(caminho_BD);
        DateTime data_final = DateTime.Now.AddDays(1);
        // Clamp the window to the start date's calendar year — presumably the
        // CCEE service requires the range within a single year (TODO confirm).
        if (data_inicial.Year != data_final.Year)
        {
            data_final = new DateTime(data_inicial.Year, 12, 31);
        }
        // Run the request synchronously to obtain the total number of pages —
        // this call also stores the data of the first page.
        max_i = processar_XML_sync(requisicao_http_sync(xml_requisicao(1, data_inicial, data_final)), caminho_BD);
        Console.Write("Tempo de execucao da etapa inicial: {0}", DateTime.Now - tempo_cod);
        Console.WriteLine("\nHavera {0} processos rodando em paralelo", max_i - 1);
        tempo_cod = DateTime.Now;
        if (BUILD_FOR == "postgres")
        {
            // PostgreSQL path: fetch and store pages 2..max_i sequentially.
            int pagesN; // NOTE(review): assigned but never read.
            for (int i = 2; i <= max_i; i++)
            {
                pagesN = processar_XML_sync(requisicao_http_sync(xml_requisicao(i, data_inicial, data_final)), caminho_BD);
            }
        }
        else
        {
            // Access path: open the database connection, then repeat the
            // request asynchronously for the remaining pages.
            OleDbConnection access_conn = new("Provider=Microsoft.ACE.OLEDB.12.0;Data Source=" + caminho_BD + ";Jet OLEDB:Database Password=gds21");
            access_conn.Open();
            await Task.WhenAll(assync_requisicao_http(data_inicial, max_i, caminho_BD, access_conn));
            Thread.Sleep(2000);
            Console.WriteLine("Concluido. Tempo TOTAL de execucao: {0}", DateTime.Now - tempo_ini);
            access_conn.Close();
        }
        return 0;
    }

    // Requests pages 2..max_i from the CCEE service in parallel and dispatches
    // each response to processar_XML_assync, which writes through the supplied
    // Access connection. Returns 2 (the value is not used by the caller).
    public static async Task<int> assync_requisicao_http(DateTime data_inicial, int max_i, string caminho_BD, OleDbConnection access_conn)
    {
        int i; // NOTE(review): unused — the Parallel.For loops declare their own `i`.
        var handler = new HttpClientHandler();
        Task<HttpResponseMessage>[] arr_Tks = new Task<HttpResponseMessage>[max_i - 1];
        Task[] arr_Tks_gen = new Task[max_i - 1];
        var arr_Var = new HttpResponseMessage[max_i - 1]; // NOTE(review): never written to.
        // Load the client certificate from the .pfx file.
        X509Certificate2 cert = new X509Certificate2("X:\\Back\\APP Smart\\Certificado\\Certificado Fernando.pfx", "appsmart");
        handler.ClientCertificates.Add(cert);
        DateTime data_final = DateTime.Now;
        // Same single-year clamping as in MainAsync (but without the +1 day).
        if (data_inicial.Year != data_final.Year)
        {
            data_final = new DateTime(data_inicial.Year, 12, 31);
        }
        DateTime aux_tempo = DateTime.Now;
        var client = new HttpClient(handler);
        client.DefaultRequestHeaders.Add("SOAPAction", "listarPLD");
        var endpoint = new Uri("https://servicos.ccee.org.br:443/ws/prec/PLDBSv1");
        // Fire one POST per page (pages 2..max_i); arr_Tks[i - 2] holds each task.
        // NOTE(review): the payload is SOAP XML but is sent with an
        // "application/json" content type — confirm the service tolerates this.
        Parallel.For(2, max_i + 1, (i) =>
        {
            var payload = new StringContent(xml_requisicao(i, data_inicial, data_final), Encoding.UTF8, "application/json");
            arr_Tks[i - 2] = client.PostAsync(endpoint, payload);
        });
        // Block on each response (.Result) and kick off its processing task.
        Parallel.For(2, max_i + 1, (i, state) =>
        {
            var retorno = arr_Tks[i - 2].Result;
            try
            {
                arr_Tks_gen[i - 2] = processar_XML_assync(retorno.Content.ReadAsStringAsync().Result, caminho_BD, access_conn);
            }
            catch (AggregateException ae)
            {
                // Single retry on failure.
                // NOTE(review): Task.Delay(100) is not awaited, so there is no
                // actual pause before the retry.
                Task.Delay(100);
                arr_Tks_gen[i - 2] = processar_XML_assync(retorno.Content.ReadAsStringAsync().Result, caminho_BD, access_conn);
            }
        });
        await Task.WhenAll(arr_Tks_gen.ToArray());
        return 2;
    }

    // Processes one response page asynchronously: parses the SOAP XML and
    // inserts one row per (hour, submarket) into Access or PostgreSQL,
    // depending on BUILD_FOR. The Access connection is shared across the
    // parallel callers; a fresh PostgreSQL connection is opened per call.
    public static async Task processar_XML_assync(string entrada, string caminho_BD, OleDbConnection access_conn)
    {
        XmlDocument doc = new XmlDocument();
        doc.LoadXml(entrada);
        //doc.Save(@"X:\Back\PLD Horário\xmlresposta.xml");
        int hora = 0, dia_semana = 0;
        int j;
        int num_pag;
        double dia = 0;
        string submercado = "", mes = "", access_strCOM, pg_strCOM;
        double valor = 0;
        DateTime aux_data = new DateTime(2005, 01, 01);
        DateTime tempo_xml;
        XmlNamespaceManager nsmgr = new XmlNamespaceManager(doc.NameTable);
        nsmgr.AddNamespace("bm", "http://xmlns.energia.org.br/BM/v1");
        nsmgr.AddNamespace("bo", "http://xmlns.energia.org.br/BO/v1");
        nsmgr.AddNamespace("hdr", "http://xmlns.energia.org.br/MH/v1");
        // NOTE(review): the standard SOAP envelope namespace ends with
        // "/envelope/" — verify, though this prefix is not used in any XPath here.
        nsmgr.AddNamespace("soapenv", "http://schemas.xmlsoap.org/soap/envelope");
        nsmgr.AddNamespace("xsi", "http://www.w3.org/2001/XMLSchema-instance");
        tempo_xml = DateTime.Now;
        int.TryParse(doc.SelectSingleNode("//hdr:totalPaginas", nsmgr).InnerText, out num_pag);
        //Console.WriteLine("Total paginas " + num_pag);
        await Task.Run(() =>
        {
            //START PGSQL
            NpgsqlConnection pg_conn;
            if (ENVIRONMENT is "dev")
            {
                pg_conn = new(PG_CONN_STRING_DEV);
            }
            else
            {
                pg_conn = new(PG_CONN_STRING_PROD);
            }
            // Only actually opened when writing to PostgreSQL.
            if (BUILD_FOR != "access")
            {
                pg_conn.Open();
            }
            foreach (XmlNode n1 in doc.SelectNodes("//bm:plds//bm:pld", nsmgr))
            {
                XmlNode n2 = n1;
                aux_data = DateTime.ParseExact(n2.SelectSingleNode("bo:vigencia//bo:inicio", nsmgr).InnerText, "yyyy-MM-dd'T'HH:mm:ss'-03:00'", null);
                // OADate value with the time-of-day fraction stripped (whole days
                // since 1899-12-30), matching the dia_num / Data columns.
                dia = (aux_data.ToOADate() - aux_data.ToOADate() % 1);
                hora = aux_data.Hour;
                mes = aux_data.ToString("yyMM");
                // Remap DayOfWeek so the week runs Monday=1 .. Sunday=7.
                dia_semana = (int)aux_data.DayOfWeek;
                if (dia_semana == 0)
                {
                    dia_semana = 7;
                }
                // One value per submarket; assumes exactly 4 submarkets per
                // hourly record — TODO confirm against the service contract.
                for (j = 0; j <= 3; j++)
                {
                    submercado = n2.SelectNodes("bo:valores//bo:valor//bo:submercado//bo:nome", nsmgr)[j].InnerText;
                    Double.TryParse(n2.SelectNodes("bo:valores//bo:valor//bo:valor//bo:valor", nsmgr)[j].InnerText, out valor);
                    // NOTE(review): both INSERTs build SQL by concatenating
                    // response data; parameterized commands would be safer.
                    if (BUILD_FOR == "access")
                    {
                        //START ACCESS
                        access_strCOM = "INSERT INTO PLD_comp (Data, Hora, Submercado, Valor, Mes_ref, Dia_da_semana)";
                        access_strCOM += " VALUES (" + dia + "," + (hora + 1) + ",\"" + submercado + "\"," + valor + "," + mes + "," + dia_semana + ")";
                        OleDbCommand tbPLD = new(access_strCOM, access_conn);
                        // NOTE(review): this reader is never closed, unlike the
                        // synchronous version which calls Close().
                        OleDbDataReader reader = tbPLD.ExecuteReader();
                        //END ACCESS
                    }
                    else
                    {
                        pg_strCOM = "INSERT INTO pld (dia_num, hora, submercado, valor, mes_ref, dia_da_semana) ";
                        pg_strCOM += "VALUES (" + dia + ", " + (hora + 1) + ", '" + submercado + "', " + valor + ", '" + mes + "', " + dia_semana + ")";
                        NpgsqlCommand pg_tcSCDE = new(pg_strCOM, pg_conn);
                        NpgsqlDataReader pg_reader = pg_tcSCDE.ExecuteReader();
                        pg_reader.Close();
                        //END PGSQL
                    }
                }
            }
            pg_conn.Close();
        });
        // Progress report for this page.
        foreach (XmlNode n1 in doc.GetElementsByTagName("hdr:numero"))
        {
            Console.WriteLine("Processando pagina {0} de {1} - tempo de execucao {2}", n1.InnerText, num_pag, DateTime.Now - tempo_xml);
        }
    }

    // Builds the request XML sent to CCEE: reads a template file from disk and
    // substitutes the DATA_INI / DATA_FIM / PAG_NUM placeholders.
    public static string xml_requisicao(int pag, DateTime dat_ini, DateTime dat_fim)
    {
        string cam_ent, tex_req, sdat_ini, sdat_fim;
        cam_ent = @"X:\Back\PLD Horário\listarPLD.txt";
        // Dates are serialized at midnight, e.g. 2022-12-31T00:00:00.
        sdat_ini = dat_ini.ToString("yyyy-MM-ddT00:00:00"); //2022-12-31T00:00:00
        sdat_fim = dat_fim.ToString("yyyy-MM-ddT00:00:00");
        tex_req = File.ReadAllText(cam_ent);
        tex_req = tex_req.Replace("DATA_INI", sdat_ini);
        tex_req = tex_req.Replace("DATA_FIM", sdat_fim);
        tex_req = tex_req.Replace("PAG_NUM", pag.ToString());
        return tex_req;
    }

    // Computes the start date for the PLD request, based on the last day of PLD
    // data saved in the database (last stored day + 1). For PostgreSQL, falls
    // back to two years ago when the table is empty.
    public static DateTime data_inicio_BD(string caminho_BD)
    {
        string access_strCOM, pg_strCOM;
        DateTime saida_ult_dia = DateTime.Now;
        if (BUILD_FOR == "access")
        {
            access_strCOM = "SELECT MAX(Data) As ultimo_dia FROM PLD_comp";
            OleDbConnection access_conn = new("Provider=Microsoft.ACE.OLEDB.12.0;Data Source=" + caminho_BD + ";Jet OLEDB:Database Password=gds21");
            access_conn.Open();
            OleDbCommand access_tbPLD = new(access_strCOM, access_conn);
            OleDbDataReader access_reader = access_tbPLD.ExecuteReader();
            access_reader.Read();
            DateTime.TryParse(access_reader["ultimo_dia"].ToString(), out saida_ult_dia);
            access_conn.Close();
            // Resume from the day after the last stored day.
            saida_ult_dia = saida_ult_dia.AddDays(1);
            Console.WriteLine("Importando dados a partir de:" + saida_ult_dia);
        }
        else
        {
            // dia_num stores OADate-style day numbers; convert back to a date
            // using the 1899-12-30 epoch.
            pg_strCOM = "SELECT TO_DATE('1899-12-30', 'YYYY-MM-DD') + INTERVAL '1' DAY * MAX(dia_num) AS ultimo_dia FROM pld";
            NpgsqlConnection pg_conn;
            if (ENVIRONMENT is "dev")
            {
                pg_conn = new(PG_CONN_STRING_DEV);
            }
            else
            {
                pg_conn = new(PG_CONN_STRING_PROD);
            }
            // Always true on this branch (BUILD_FOR != "access" here).
            if (BUILD_FOR != "access")
            {
                pg_conn.Open();
            }
            NpgsqlCommand pg_tcSCDE = new(pg_strCOM, pg_conn);
            NpgsqlDataReader pg_reader = pg_tcSCDE.ExecuteReader();
            pg_reader.Read();
            Console.WriteLine("" + pg_reader["ultimo_dia"].ToString());
            if (pg_reader["ultimo_dia"].ToString().Length > 0)
            {
                DateTime.TryParse(pg_reader["ultimo_dia"].ToString(), out saida_ult_dia);
                pg_reader.Close();
                saida_ult_dia = saida_ult_dia.AddDays(1);
                Console.WriteLine("Importando dados a partir de:" + saida_ult_dia);
            }
            else
            {
                // Empty table: import the last two years.
                saida_ult_dia = DateTime.Now.AddYears(-2);
                Console.WriteLine("Importando dados a partir de:" + saida_ult_dia);
            }
        }
        return saida_ult_dia;
    }

    // Synchronous PLD request: POSTs the given request body to the CCEE
    // endpoint using the client certificate and returns the raw response body.
    public static string requisicao_http_sync(string tex_req)
    {
        var handler = new HttpClientHandler();
        // Load the client certificate from the .pfx file.
        X509Certificate2 cert = new X509Certificate2("X:\\Back\\APP Smart\\Certificado\\Certificado Fernando.pfx", "appsmart");
        handler.ClientCertificates.Add(cert);
        using (var client = new HttpClient(handler))
        {
            client.DefaultRequestHeaders.Add("SOAPAction", "listarPLD");
            var endpoint = new Uri("https://servicos.ccee.org.br:443/ws/prec/PLDBSv1");
            var newPostJson = JsonConvert.ToString(tex_req); // NOTE(review): unused.
            var payload = new StringContent(tex_req, Encoding.UTF8, "application/json");
            // Change the next line to make this asynchronous.
            //var retorno = client.PostAsync(endpoint, payload).Result.Content.ReadAsStringAsync().Result;
            var retorno = client.PostAsync(endpoint, payload).Result.Content.ReadAsStringAsync().Result;
            //https://stackoverflow.com/questions/642293/how-do-i-read-and-parse-an-xml-file-in
            return retorno;
        }
    }

    // Processes the response data synchronously and returns the total number of
    // pages reported by the service. Also inserts the page's rows into Access
    // or PostgreSQL per BUILD_FOR.
    // NOTE(review): duplicates the parsing/insert logic of processar_XML_assync
    // with a different traversal style — consider consolidating.
    public static int processar_XML_sync(string entrada, string caminho_BD)
    {
        XmlDocument doc = new XmlDocument();
        doc.LoadXml(entrada);
        // Debug copy of the raw response.
        doc.Save(@"X:\Back\PLD Horário\xmlresposta.xml");
        int hora = 0, dia_semana = 0;
        int num_pag;
        double dia = 0;
        string submercado = "", mes = "", access_strCOM, pg_strCOM;
        double valor = 0;
        DateTime aux_data = new DateTime(2005, 01, 01);
        // Open the database connection (Access is always opened here, even on
        // the PostgreSQL path — NOTE(review): verify this is intentional).
        OleDbConnection access_conn = new("Provider=Microsoft.ACE.OLEDB.12.0;Data Source=" + caminho_BD + ";Jet OLEDB:Database Password=gds21");
        access_conn.Open();
        NpgsqlConnection pg_conn;
        if (ENVIRONMENT is "dev")
        {
            pg_conn = new(PG_CONN_STRING_DEV);
        }
        else
        {
            pg_conn = new(PG_CONN_STRING_PROD);
        }
        if (BUILD_FOR != "access")
        {
            pg_conn.Open();
        }
        Thread.CurrentThread.CurrentCulture = CultureInfo.CreateSpecificCulture("en-GB");
        num_pag = 1;
        foreach (XmlNode n1 in doc.GetElementsByTagName("hdr:totalPaginas"))
        {
            int.TryParse(n1.InnerText, out num_pag);
        }
        foreach (XmlNode n1 in doc.GetElementsByTagName("hdr:numero"))
        {
            Console.WriteLine("Processando pagina {0} de {1}", n1.InnerText, num_pag);
        }
        // Walk each bm:pld record: child nodes carry the validity start time
        // (bo:inicio) and the per-submarket values (bo:valor).
        foreach (XmlNode n1 in doc.GetElementsByTagName("bm:pld"))
        {
            foreach (XmlNode n2 in n1.ChildNodes)
            {
                foreach (XmlNode n3 in n2.ChildNodes)
                {
                    if (n3.Name == "bo:inicio")
                    {
                        aux_data = DateTime.ParseExact(n3.InnerText, "yyyy-MM-dd'T'HH:mm:ss'-03:00'", null);
                        // OADate value with the time fraction stripped (whole
                        // days since 1899-12-30).
                        dia = (aux_data.ToOADate() - aux_data.ToOADate() % 1);
                        hora = aux_data.Hour;
                        mes = aux_data.ToString("yyMM");
                        // Remap DayOfWeek so the week runs Monday=1 .. Sunday=7.
                        dia_semana = (int)aux_data.DayOfWeek;
                        if (dia_semana == 0)
                        {
                            dia_semana = 7;
                        }
                    }
                    if (n3.Name == "bo:valor")
                    {
                        foreach (XmlNode n4 in n3.ChildNodes)
                        {
                            if (n4.Name == "bo:submercado" || n4.Name == "bo:valor")
                            {
                                foreach (XmlNode n5 in n4.ChildNodes)
                                {
                                    switch (n5.Name)
                                    {
                                        case "bo:nome":
                                            // Submarket name arrives before its
                                            // value; remember it for the insert.
                                            submercado = n5.InnerText;
                                            break;
                                        case "bo:valor":
                                            Double.TryParse(n5.InnerText, out valor);
                                            //Console.WriteLine("Mes:" + mes + " - Dia da semana:" + dia_semana + " - Dia:" + dia + " - Hora:" + hora + "- Submercado:" + submercado + "- Valor:" + valor + "\n");
                                            // Write the record to the database.
                                            // NOTE(review): SQL built by string
                                            // concatenation — parameterized
                                            // commands would be safer.
                                            if (BUILD_FOR == "access")
                                            {
                                                access_strCOM = "INSERT INTO PLD_comp (Data, Hora, Submercado, Valor, Mes_ref, Dia_da_semana)";
                                                access_strCOM += " VALUES (" + dia + "," + (hora + 1) + ",\"" + submercado + "\"," + valor + "," + mes + "," + dia_semana + ")";
                                                //Console.WriteLine(strCOM);
                                                OleDbCommand access_tbPLD = new(access_strCOM, access_conn);
                                                OleDbDataReader access_reader = access_tbPLD.ExecuteReader();
                                                access_reader.Close();
                                            }
                                            else
                                            {
                                                pg_strCOM = "INSERT INTO pld " +
                                                "(dia_num, hora, submercado, valor, mes_ref, dia_da_semana) ";
                                                pg_strCOM += "VALUES (" + dia + ", " + (hora + 1) + ", '" + submercado + "', " + valor + ", '" + mes + "', " + dia_semana + ")";
                                                NpgsqlCommand pg_tcSCDE = new(pg_strCOM, pg_conn);
                                                NpgsqlDataReader pg_reader = pg_tcSCDE.ExecuteReader();
                                                pg_reader.Close();
                                            }
                                            break;
                                        default:
                                            break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        access_conn.Close();
        pg_conn.Close();
        return num_pag;
    }
}