taskkill /IM "sqlservr.exe" /F
timeout 2
net start MSSQLSERVER
Tuesday, September 13, 2022
Invoking a SQL Server job programmatically
Invoke a job starting at a specific step (omitting @step_name starts the job at its configured start step). Example:
EXECUTE msdb.dbo.sp_start_job @job_name='JobName', @step_name = 'Step3 Name'
Or do so programmatically on-demand:
SqlJobRunner.cs
using System;
using System.Timers;
using Microsoft.SqlServer.Management.Common;
using Microsoft.SqlServer.Management.Smo;
using Microsoft.SqlServer.Management.Smo.Agent;
namespace RefreshFactLedgerEntries
{
public class SqlJobRunner : IDisposable
{
public int TimeoutInterval { get; set; } = 60; // seconds
public event EventHandler<JobCompletedEventArgs> JobCompleted;
ServerConnection _conn;
Server _server;
volatile bool _timedOut; // set by the timeout timer, checked by the polling loop
public SqlJobRunner(string sqlServer, string username, string password)
{
_conn = new ServerConnection(sqlServer, username, password);
_server = new Server(_conn);
}
public CompletionResult Run(string jobName)
{
Job job = GetJob(jobName);
return RunSynchronously(job);
}
private Job GetJob(string jobName)
{
Job job = _server.JobServer.Jobs[jobName]; //Get the specified job
return job;
}
private CompletionResult RunSynchronously(Job job)
{
/*
* Steps:
* (1) Start the job
* - If it's currently running, throw an error
* (2) Continually poll the job status
* (3) Once you get a job status of completed, return the result
* - If the job doesn't finish within the timeout, throw an error
*/
try
{
job.Refresh();
// if the job is not idle, throw an error
if (job.CurrentRunStatus != JobExecutionStatus.Idle)
{
throw new Exception(String.Format("SQL Agent Job {0} is currently busy (in state {1})!", job.Name, job.CurrentRunStatus));
}
// start the job
job.Start();
// Set a timer, specifically so we can time out of long-running jobs.
// Note: System.Timers.Timer silently swallows exceptions thrown from its
// Elapsed handler, so the handler only sets a flag and the polling loop
// below does the actual throwing.
_timedOut = false;
Timer timer = new Timer(TimeoutInterval * 1000);
timer.Elapsed += TimeoutExpired;
timer.Enabled = true;
// Wait 10 seconds before polling for the status
System.Threading.Thread.Sleep(10 * 1000);
job.Refresh();
// poll the job for its status
while (job.CurrentRunStatus == JobExecutionStatus.Executing)
{
if (_timedOut)
throw new Exception("The SQL Agent Job did not finish in the allotted time! Quitting process.");
System.Threading.Thread.Sleep(10 * 1000); // wait 10 secs before checking again
job.Refresh();
}
// job finished, we don't need the timer anymore
timer.Enabled = false;
timer.Dispose();
return job.LastRunOutcome;
}
finally
{
OnJobCompleted(new JobCompletedEventArgs(job.Name, true));
}
}
private void TimeoutExpired(object source, ElapsedEventArgs e)
{
// Exceptions thrown here would be swallowed by System.Timers.Timer;
// just record that the timeout expired and let the polling loop throw.
_timedOut = true;
}
public CompletionResult RunInSequence(string[] jobNames)
{
Job[] jobs = new Job[jobNames.Length];
// make sure these are in the same order as given
for (int i = 0; i < jobNames.Length; i++)
{
jobs[i] = GetJob(jobNames[i]);
}
return RunInSequence(jobs);
}
private CompletionResult RunInSequence(Job[] jobs)
{
foreach (Job job in jobs)
{
CompletionResult result = this.RunSynchronously(job);
if (result != CompletionResult.Succeeded)
{
throw new Exception(String.Format("SQL Agent Job {0} completed with a result of {1} !", job.Name, result.ToString()));
return CompletionResult.Failed;
}
}
return CompletionResult.Succeeded;
}
protected void OnJobCompleted(JobCompletedEventArgs e)
{
if (JobCompleted != null)
JobCompleted(this, e);
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// free managed resources
if (_server != null)
_server = null;
if (_conn != null)
{
_conn.Disconnect();
_conn = null;
}
}
// free native resources if there are any.
}
}
}
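The JobCompletedEventArgs type used above is not shown in the post; a minimal sketch that matches how it is consumed (a job name plus a completion flag) would be:
public class JobCompletedEventArgs : EventArgs
{
    public string JobName { get; private set; }
    public bool Completed { get; private set; }

    public JobCompletedEventArgs(string jobName, bool completed)
    {
        JobName = jobName;
        Completed = completed;
    }
}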
Program.cs
using System;
using System.Data;
using System.Data.SqlClient;
using System.Net.Mail;
using System.Configuration;
namespace RefreshFactLedgerEntries
{
class Program
{
static string sqlServer = ConfigurationManager.AppSettings["SQLServer"];
static string userName = ConfigurationManager.AppSettings["UserName"];
static string password = ConfigurationManager.AppSettings["Password"];
static string emailRecipients = ConfigurationManager.AppSettings["EmailRecipients"];
static string smtp = ConfigurationManager.AppSettings["SMTPHost"];
static void Main(string[] args)
{
string[] jobs = new string[]
{
"ETL.SQL - Populate Data Warehouse 4 - Fact Ledger Entries"
};
try
{
Console.WriteLine(String.Format("Updating the data warehouse ({0}) with the latest Fact Ledger Entries.", sqlServer));
Console.WriteLine("*** Do not close this window until the process completes ***");
Console.WriteLine("*** This process may take up to an hour to complete ***");
using (SqlJobRunner runner = new SqlJobRunner(sqlServer, userName, password))
{
runner.JobCompleted += Runner_JobCompleted;
runner.RunInSequence(jobs);
}
SendEmail(
emailRecipients,
"Fact Ledger Entries Successfully Updated",
"The data warehouse has been updated with the latest Fact Ledger Entries.");
Console.WriteLine("The process has completed successfully. You may now close this window.");
}
catch (Exception e)
{
string errorInfo = String.Format(" Message: {0} \n Inner Exception: {1} \n Stack Trace: {2}",
e.Message,
e.InnerException,
e.StackTrace);
LogError(e);
SendEmail(
emailRecipients,
"Fact Ledger Entry Update Failed",
errorInfo
);
Console.WriteLine(String.Format("The Fact Ledger Entry update has failed. Please contact IT and include the following info: {0}", errorInfo));
}
finally
{
Console.ReadLine();
}
}
private static void Runner_JobCompleted(object sender, JobCompletedEventArgs e)
{
//Console.WriteLine(String.Format("SQL Agent Job {0} completed successfully", e.JobName));
}
static void SendEmail(string recipients, string subject, string body)
{
using (MailMessage mail = new MailMessage())
using (SmtpClient client = new SmtpClient(smtp, 25))
{
client.UseDefaultCredentials = false;
foreach (var address in recipients.Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries))
{
mail.To.Add(address);
}
mail.From = new MailAddress("donotreply@pdp.com");
mail.Subject = subject;
mail.Body = body;
client.Send(mail);
}
}
static void LogError(Exception e)
{
string dbErrorConn = String.Format("Data Source={0};Initial Catalog=ErrorLog;Persist Security Info=True;User Id={1};Password={2};",
sqlServer,
userName,
password);
using (SqlConnection conn = new SqlConnection(dbErrorConn))
{
using (SqlCommand cmd = new SqlCommand("splog_InsertError", conn))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@appName", "RefreshFactLedgerEntry");
cmd.Parameters.AddWithValue("@Module", "RefreshFactLedgerEntry");
cmd.Parameters.AddWithValue("@Message", e.Message);
cmd.Parameters.AddWithValue("@StackTrace", e.StackTrace);
cmd.Parameters.AddWithValue("@InnerException", e.InnerException);
cmd.Parameters.AddWithValue("@Severity", 1);
cmd.Parameters.AddWithValue("@UserId", "System");
conn.Open();
cmd.ExecuteNonQuery();
conn.Close();
}
}
}
}
}
OAuth Bearer Token in Business Central and Azure
##Problem Description
The old Microsoft implementation of Web Services authorization is a giant security hole: anyone who knows the non-expiring user id and web service access key can read and tamper with the accounting data, and anyone with sufficient access to the user card can generate a new access key.
Microsoft has deprecated the feature and, in future releases, will not make it available to Cloud installations of Business Central. (On-premise installations can still use it for the foreseeable future.)
An OAuth token, by contrast, has a limited lifetime and needs to be refreshed constantly. We can refresh the token automatically, forever. (Only generating the first token requires an Azure Active Directory login.)
(Should we go to prod before Microsoft removes the Web Service Access Key method entirely, we need to wipe out any existing web access keys used by the consultants or internal users.)
##Register Azure app as described here in detail:
https://www.youtube.com/watch?v=lCzqg2N0vbA
(I have downloaded the above YouTube video locally in the unlikely event the author pulls it. Microsoft's documentation is absurdly thin on the implementation plumbing; this article lists the general road map:
https://docs.microsoft.com/en-us/dynamics365/business-central/dev-itpro/webservices/authenticate-web-services-using-oauth
)
The already-registered app is shown below. Note that you never need to create the actual app; the registration alone is sufficient and acts as a proxy for obtaining the access and refresh tokens.
https://portal.azure.com/#blade/Microsoft_AAD_RegisteredApps/ApplicationsListBlade
**In Prod you may have to either alter the registered app's parameters to point to the Prod URLs or create a new registered app with the new Prod URLs.**
##Get the first token from Postman:
Set the parameters and click "Get New Access Token". (I can provide the Postman parameters on Teams; they are derived from the registered app.)
An Active Directory login will pop up, and you need to supply your credentials.
![image.png](.attachments/image-1daafd73-4f7a-474f-bb9b-71038ccff1e6.png)
Copy and paste the access token, then scroll down and copy and paste the refresh token.
You will put the tokens into the following SQL. But before executing it, stop the instances of the Prefix OAuth Token refresher scheduled task shown below.
_update [NavisionIntegration].[dbo].[PrefixBusinessCentralOAuthToken] set [access_token]='YourNewAccessToken', [refresh_token]='yournewRefreshToken'_
![image.png](.attachments/image-150975ad-0725-46f3-b0de-d6e1e08c5bb9.png)
##Stop instances of PrefixBusinessCentralOAtuhTokenRefresher scheduled task
The PrefixBusinessCentralOAtuhTokenRefresher project under the Prefix.Automation solution is a scheduled task that uses the refresh_token to get a new access token.
Stop the dev-apps and Fenix instances of the scheduled task:
![image.png](.attachments/image-5c8fb5b0-e352-4775-8ded-9d8335791bdb.png)
##Store the access and refresh token in DB
update [NavisionIntegration].[dbo].[PrefixBusinessCentralOAuthToken] set [access_token]='YourNewAccessToken', [refresh_token]='yournewRefreshToken'
##Run on your server machine
**Note: run only one instance of the refresher app, since there is only one registered app in Azure; otherwise you would issue multiple unnecessary refreshes per hour.**
**Note: most of the time the refresher scheduled task is disabled in dev-apps, to save some bandwidth against Azure. That means the token and, more importantly, the refresh token in the database will be obsolete, since both will have expired. In that case you need to get newly seeded access and refresh tokens from Postman.**
###PrefixOAuthMicroService
All apps get the latest token from the database through a call to the **PrefixOAuthMicroService** project, which is also in the Prefix.Automation solution.
private static string GetOAuthAccessToken()
{
// Requires: using System.Net.Http; using System.Threading.Tasks;
var client = new HttpClient();
Task<string> resultOrder = client.GetStringAsync(ConfigurationManager.AppSettings["PrefixOAuthMicroService"]);
return resultOrder.Result;
// or get it from the database:
/* NavisionIntegrationEntities model = new NavisionIntegrationEntities();
var lst = model.usp_GetPrefixBusinessCentralOAuthToken().ToList();
return lst[0].access_token; */
}
where the microservice URL is specified in config:
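For reference, the appSettings entry read above might look like this (the URL is a placeholder for the actual microservice endpoint):
<appSettings>
  <add key="PrefixOAuthMicroService" value="http://your-server/PrefixOAuthMicroService/api/token" />
</appSettings>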
Since Microsoft expires the token every hour or so, we have to get the common token from the DB, NOT from config file(s), and refresh it 24/7.
(Extending the expiration period of the access token was not possible with an Azure cmdlet. The refresh token has a much longer expiration period than the access token, and since we are constantly refreshing with the PrefixBusinessCentralOAtuhTokenRefresher task, our token stays up to date indefinitely. I've tested this for days running through automation.)
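For illustration, each refresh cycle boils down to a single POST against the Azure AD v2.0 token endpoint with grant_type=refresh_token. A minimal sketch (the tenant id, client id, and scope below are placeholders; the actual refresher project may differ):
// Minimal sketch of one refresh cycle. <tenant-id> and <client-id> are
// placeholders; the real PrefixBusinessCentralOAtuhTokenRefresher may differ.
using System.Collections.Generic;
using System.Net.Http;

static string RefreshAccessToken(string refreshToken)
{
    using (var client = new HttpClient())
    {
        var form = new FormUrlEncodedContent(new Dictionary<string, string>
        {
            ["grant_type"] = "refresh_token",
            ["refresh_token"] = refreshToken,
            ["client_id"] = "<client-id>",
            ["scope"] = "https://api.businesscentral.dynamics.com/.default offline_access"
        });
        var response = client.PostAsync(
            "https://login.microsoftonline.com/<tenant-id>/oauth2/v2.0/token", form).Result;
        response.EnsureSuccessStatusCode();
        // The JSON response carries both a new access_token and a new
        // refresh_token; both should be written back to the database row.
        return response.Content.ReadAsStringAsync().Result;
    }
}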
Should the refresher be down for hours in production, we may have to re-generate the original access and refresh tokens and store them in the NavisionIntegration database before restarting the scheduled task.
Activate a Windows Service on the current machine
// Requires: using System; using System.IO; using System.ServiceProcess;
public static string ActivateHybridService(string machine)
{
try
{
ServiceController[] services = ServiceController.GetServices(machine);
foreach (ServiceController service in services)
{
// check for the service names we care about
if ("Infrastructure.Email.WinService" == service.ServiceName || "Finance.Pricing.Navision" == service.ServiceName)
{
// compare against the enum instead of a lowercased status string
if (service.Status != ServiceControllerStatus.Running)
{
// config flag: if auto-reset then automatically start the service
// if (appSettings.AutoStart == "1") {
service.Start();
using (StreamWriter sw = new StreamWriter(@"C:\junk\ReplicatePLMServiceRestartInfo.txt", true))
{
sw.WriteLine(DateTime.Now);
sw.WriteLine(service.ServiceName + " was Auto-Started.");
}
return "Service was Auto-Started.";
}
}
}
}
catch (Exception err)
{
using (StreamWriter sw = new StreamWriter(@"C:\junk\ReplicatePLMServicesRestar.txt", true))
{
sw.WriteLine(err.Message);
sw.WriteLine((err.InnerException == null ? string.Empty : err.InnerException.Message) +
(err.StackTrace ?? string.Empty));
}
return err.Message;
}
return "OK";
}
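A minimal caller for the method above (the containing class is not shown in the post, so ServiceUtils is an assumed name):
// Hypothetical caller; ServiceUtils is an assumed class name.
// Checks the two services on the local machine and auto-starts them if stopped.
static void Main()
{
    string result = ServiceUtils.ActivateHybridService(Environment.MachineName);
    Console.WriteLine(result); // "OK", "Service was Auto-Started.", or an error message
}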
Save to Excel on Client side
Client-side code calling a server-side endpoint:
utils.getExport = function () {
// encodeURIComponent guards against '&', '=', and spaces in the search text
window.location = urbase + '/Home/GetExportRowsRedir?tab=' + $('#tab').val() +
'&query=' + encodeURIComponent($('#searchStar').val()) +
'&submitted=' + $('#filterProcessed').val();
};
Server-side endpoint building and returning a file to the client:
[HttpGet]
public FileResult GetExportRowsRedir(int tab, string query, int submitted)
{
NavisionIntegrationEntities navIModel = new NavisionIntegrationEntities();
var rows = navIModel.usp_GetCustomerServiceOrderWeb2("", query, "", "", "", "", tab.ToString(),
submitted, 0, 100000, 0).ToList();
StringBuilder sb = new StringBuilder();
sb.AppendLine("Created,Last Name,First name,Address,Address 2,City,State,Zip,Country,SKU,Qty,Phone,Email,Ticket #,Procd,External_Tracking_No_,PNo_,ShippingNo_,Agent" +
(AdminUtils.IsAdmin(User.Identity.Name) ? ",User" : ""));
foreach (var row in rows)
{
sb.Append(row.Date_Created);
sb.Append(",");
sb.Append(row.Last_Name);
sb.Append(",");
sb.Append(row.First_Name);
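// NOTE: only the first few of the header's columns are appended above;
// the remaining fields (Address through Agent) would follow the same pattern.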
if (AdminUtils.IsAdmin(User.Identity.Name))
{
sb.Append(",");
sb.Append(row.Created_by);
}
sb.AppendLine();
}
MemoryStream stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(sb.ToString()));
return File(stream, "text/csv", string.Format("Export-{0}.csv", DateTime.Now.ToString("yyyyMMdd-HHmmss")));
}
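One caveat with building CSV via raw Append calls: field values containing commas, quotes, or newlines will corrupt the row layout. An illustrative escaping helper (not part of the original code) that could wrap each appended field:
// Illustrative helper: quotes a field and doubles embedded quotes
// whenever the value would otherwise break the CSV layout.
static string CsvEscape(string field)
{
    if (string.IsNullOrEmpty(field))
        return string.Empty;
    if (field.IndexOfAny(new[] { ',', '"', '\r', '\n' }) >= 0)
        return "\"" + field.Replace("\"", "\"\"") + "\"";
    return field;
}
// Usage: sb.Append(CsvEscape(row.Last_Name));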
Upon completion of the server-side code, control returns to the client and the file is downloaded to the user's machine (in Chrome, for example, it appears in the downloads bar).
Monday, February 21, 2022
Front End Development Basic Tools
Overview
npm is your toolbox; you use it to install tools such as Bower and Gulp.
Bower is a package manager: it pulls down the JavaScript libraries you want locally. We (generally) don't keep these in source control, as the Gulp-optimized versions are kept in source control instead.
Gulp is run over the top of your source files to minify, optimize, and otherwise process CSS, JS, and images for production use.
SASS
Sass is a stylesheet language that’s compiled to CSS. It allows you to use variables, nested rules, mixins, functions, and more, all with a fully CSS-compatible syntax. Sass helps keep large stylesheets well-organized and makes it easy to share design within and across projects.
https://sass-lang.com/
https://sass-lang.com/guide
https://css-tricks.com/the-sass-ampersand/
BEM
BEM (Block Element Modifier) is a styling methodology that helps you create reusable components and share code in front-end development.
Block: Standalone entity that is meaningful on its own.
Element: A part of a block that has no standalone meaning and is semantically tied to its block.
Modifier: A flag on a block or element. Use them to change appearance or behavior.
Official Website: http://getbem.com/
Quick Start: http://getbem.com/introduction/
NPM
npm makes it easy for JavaScript developers to share and reuse code, and makes it easy to update the code that you’re sharing, so you can build amazing things.
Official Website: https://www.npmjs.com/get-npm
Gulp
Gulp is a task runner.
Modern web development has many repetitive tasks: running a local server, minifying code, optimizing images, preprocessing CSS, and more. Gulp is a cross-platform, streaming task runner that lets developers automate them. At a high level, gulp reads files as streams and pipes those streams to different tasks; the tasks are code-based, use plugins, and modify the files, building source files into production files.
Official Website: https://gulpjs.com/
Quick Start: https://gulpjs.com/docs/en/getting-started/quick-start
Also see:
https://developers.google.com/web/ilt/pwa/introduction-to-gulp
Bower
Bower is a package manager. It works by fetching, downloading, and installing packages as needed.
Official Website: https://bower.io/
.NET Core
.NET Core is an open-source, general-purpose development platform maintained by Microsoft and the .NET community on GitHub. It’s cross-platform (supporting Windows, macOS, and Linux) and can be used to build device, cloud, and IoT applications.
Official Website: https://dotnet.microsoft.com/
Also see:
https://docs.microsoft.com/en-us/dotnet/core/
Razor Pages
Razor is a server-side markup language that lets you embed server-based code (Visual Basic and C#) into web pages.
See:
https://docs.microsoft.com/en-us/aspnet/core/razor-pages/?view=aspnetcore-2.2&tabs=visual-studio
https://www.w3schools.com/asp/razor_intro.asp
Nuget
NuGet is the package manager for .NET. The NuGet client tools provide the ability to produce and consume packages. The NuGet Gallery is the central package repository used by all package authors and consumers.
Official Website: https://www.nuget.org/
Moving or pointing a SQL Server database to a new disk location
Sample SQL for Moving SSISDB:
From:
C:\Program Files\Microsoft SQL Server\MSSQL12.MSSQLSERVER\MSSQL\DATA
To:
D:\SQLData
-- Check SSISDB file paths; they should point to the From paths above
use master
ALTER DATABASE SSISDB SET SINGLE_USER WITH ROLLBACK IMMEDIATE
ALTER DATABASE SSISDB SET OFFLINE WITH ROLLBACK IMMEDIATE
ALTER DATABASE SSISDB MODIFY FILE ( NAME = data, FILENAME = 'D:\SQLData\SSISDB.mdf')
ALTER DATABASE SSISDB MODIFY FILE ( NAME = log, FILENAME = 'D:\SQLData\SSISDB.ldf')
-- Physically copy/move SSISDB.mdf and SSISDB.ldf to D:\SQLData now,
-- while the database is still offline; SET ONLINE will fail otherwise
ALTER DATABASE SSISDB SET ONLINE
ALTER DATABASE SSISDB SET MULTI_USER
-- Check SSISDB file paths again; they should now point to D:\SQLData
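For the before/after check, this standard catalog query shows where SQL Server believes the database files live:
SELECT name, physical_name, state_desc
FROM sys.master_files
WHERE database_id = DB_ID('SSISDB');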
See: https://www.sqlshack.com/move-sql-database-files-mdf-ldf-another-location/
Microsoft Doc covering all scenarios:
https://docs.microsoft.com/en-us/sql/relational-databases/databases/move-user-databases?view=sql-server-2017
Moving System Databases:
https://docs.microsoft.com/en-us/sql/relational-databases/databases/move-system-databases?view=sql-server-2017