Properly using the Count Function - sql

In the Enrollment_Changes table, the phone model listed is the phone the subscriber changed FROM on that date.
If there is no subsequent change on Enrollment_Changes, the phone the subscriber changed TO is listed on the P_Enrollment table
For example, subscriber 12345678 enrolled on 1/5/2011 with a RAZR. On 11/1/2011 he changed FROM the RAZR. You can see what he changed TO with the next transaction on Enrollment_Changes on 05/19/2012.
How would you find the Count of subs that first enrolled with the iPhone 3?
Here is the code I have for creating the tables
Create Tables: TBL 1
USE [Test2]
GO
/****** Object: Table [dbo].[P_ENROLLMENT] ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Enrollment snapshot: one row per subscriber in the sample data.
-- Per the problem statement, Phone_Model here is the phone the subscriber
-- changed TO after their last ENROLLMENT_CHANGES row -- i.e. the CURRENT
-- phone. It equals the original enrollment phone only for subscribers with
-- no change records.
CREATE TABLE [dbo].[P_ENROLLMENT](
-- NOTE(review): the column name below contains a TRAILING SPACE, preserved
-- by the [] quoting; every reference must spell it [Subid ] exactly.
-- A float identifier is also lossy for large values -- int/bigint would be
-- safer. TODO confirm downstream references before renaming/retyping.
[Subid ] [float] NULL,
[Enrollment_Date] [datetime] NULL,
[Channel] [nvarchar](255) NULL,
[Region] [nvarchar](255) NULL,
[Active_Status] [float] NULL, -- 0/1 flag in the sample data (0 = dropped)
[Drop_Date] [datetime] NULL,  -- populated only when Active_Status = 0 in the sample data
[Phone_Model] [nvarchar](255) NULL
) ON [PRIMARY]
GO
TBL 2
USE [Test2]
GO
/****** Object: Table [dbo].[ENROLLMENT_CHANGES] ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Phone-change log: one row per change event. Per the problem statement,
-- Phone_Model is the phone the subscriber changed FROM on Cdate, so a
-- subscriber's ORIGINAL phone is the model on their earliest row here.
CREATE TABLE [dbo].[ENROLLMENT_CHANGES](
[Subid] [float] NULL, -- NOTE(review): no trailing space here, unlike P_ENROLLMENT.[Subid ]
[Cdate] [datetime] NULL,
[Phone_Model] [nvarchar](255) NULL
) ON [PRIMARY]
GO
Insert TBL1
-- Seed P_ENROLLMENT with the 15 sample subscribers in a single multi-row
-- insert (values identical to the original row-by-row statements).
-- NOTE(review): 'Southeast' and 'West' appear as Phone_Model on two rows
-- below -- they look like region values pasted into the wrong column, but
-- the source data is preserved as-is; confirm before correcting.
INSERT INTO [P_ENROLLMENT]([Subid ], [Enrollment_Date], [Channel], [Region], [Active_Status], [Drop_Date], [Phone_Model])
VALUES
(12345678, '2011-01-05 00:00:00', 'Retail', 'Southeast', 1, NULL, 'iPhone 4'),
(12346178, '2011-03-13 00:00:00', 'Indirect Dealers', 'West', 1, NULL, 'HTC Hero'),
(12346679, '2011-05-19 00:00:00', 'Indirect Dealers', 'Southeast', 0, '2012-03-15 00:00:00', 'Droid 2'),
(12347190, '2011-07-25 00:00:00', 'Retail', 'Northeast', 0, '2012-05-21 00:00:00', 'iPhone 4'),
(12347701, '2011-08-14 00:00:00', 'Indirect Dealers', 'West', 1, NULL, 'HTC Hero'),
(12348212, '2011-09-30 00:00:00', 'Retail', 'West', 1, NULL, 'Droid 2'),
(12348723, '2011-10-20 00:00:00', 'Retail', 'Southeast', 1, NULL, 'Southeast'),
(12349234, '2012-01-06 00:00:00', 'Indirect Dealers', 'West', 0, '2012-02-14 00:00:00', 'West'),
(12349745, '2012-01-26 00:00:00', 'Retail', 'Northeast', 0, '2012-04-15 00:00:00', 'HTC Hero'),
(12350256, '2012-02-11 00:00:00', 'Retail', 'Southeast', 1, NULL, 'iPhone 4'),
(12350767, '2012-03-02 00:00:00', 'Indirect Dealers', 'West', 1, NULL, 'Sidekick'),
(12351278, '2012-04-18 00:00:00', 'Retail', 'Midwest', 1, NULL, 'iPhone 3'),
(12351789, '2012-05-08 00:00:00', 'Indirect Dealers', 'West', 0, '2012-07-04 00:00:00', 'iPhone 3'),
(12352300, '2012-06-24 00:00:00', 'Retail', 'Midwest', 1, NULL, 'Droid 2'),
(12352811, '2012-06-25 00:00:00', 'Retail', 'Southeast', 1, NULL, 'Sidekick');
Insert TBL2
-- Seed ENROLLMENT_CHANGES with the 11 sample change events in a single
-- multi-row insert (values identical to the original row-by-row statements).
INSERT INTO [ENROLLMENT_CHANGES]([Subid], [Cdate], [Phone_Model])
VALUES
(12345678, '2011-11-01 00:00:00', 'RAZR'),
(12346178, '2012-01-07 00:00:00', 'HTC Hero'),
(12348723, '2012-01-28 00:00:00', 'RAZR'),
(12350256, '2012-02-21 00:00:00', 'Blackberry Bold'),
(12349745, '2012-05-05 00:00:00', 'HTC Hero'),
(12345678, '2012-05-19 00:00:00', 'Palm Pre'),
(12347190, '2012-05-20 00:00:00', 'HTC Hero'),
(12350256, '2012-05-21 00:00:00', 'Blackberry Bold'),
(12349234, '2012-06-04 00:00:00', 'Palm Pre'),
(12346178, '2012-06-05 00:00:00', 'iPhone 3'),
(12350767, '2012-06-10 00:00:00', 'iPhone 3');

For the count
-- Count of subscribers whose FIRST (enrollment) phone was the iPhone 3.
-- ENROLLMENT_CHANGES.Phone_Model is the phone changed FROM, so a subscriber's
-- original phone is the model on their EARLIEST change row; subscribers with
-- no change rows (c.cdate IS NULL after the left join) still hold it in
-- P_ENROLLMENT.Phone_Model, handled by the coalesce.
-- BUG FIX: the original ordered the window by c.cdate DESC, which makes rn=1
-- the LATEST change and reports the wrong "first" phone for anyone with two
-- or more change rows (e.g. 12345678 would show Palm Pre instead of RAZR).
select COUNT(*) Total
from
(
select e.*,
rn = row_number() over (partition by e.subid order by c.cdate asc),
first_model = coalesce(c.phone_model, e.phone_model)
from [P_ENROLLMENT] e
left join [ENROLLMENT_CHANGES] c on c.subid = e.subid
) x
where rn=1 and first_model = 'iPhone 3'
For all the records
-- Detail rows for every subscriber whose FIRST (enrollment) phone was the
-- iPhone 3. A subscriber's original phone is the model on their EARLIEST
-- ENROLLMENT_CHANGES row (the phone first changed FROM); subscribers with no
-- change rows still hold it in P_ENROLLMENT.Phone_Model (coalesce fallback).
-- BUG FIX: the original ordered the window by c.cdate DESC, so rn=1 picked
-- the LATEST change and mislabeled the "first" phone for subscribers with
-- multiple change rows; the earliest change (ASC) is the correct one.
select *
from
(
select e.*,
rn = row_number() over (partition by e.subid order by c.cdate asc),
first_model = coalesce(c.phone_model, e.phone_model)
from [P_ENROLLMENT] e
left join [ENROLLMENT_CHANGES] c on c.subid = e.subid
) x
where rn=1 and first_model = 'iPhone 3'
order by subid

You want to know if the first record in the table is an iPhone 3. Something like this:
-- Counts subscribers whose p_enrollment row shows 'iPhone 3'.
-- NOTE(review): the sample data has one row per subid in p_enrollment, so
-- seqnum is always 1 and this reduces to a plain WHERE filter on the table.
-- Per the problem statement, Phone_Model in p_enrollment is the phone the
-- subscriber changed TO (their current phone), so anyone who enrolled with
-- an iPhone 3 and later switched (e.g. 12350767) is missed -- confirm which
-- semantics are wanted before using this.
select count(*)
from (select e.*,
row_number() over (partition by subid order by enrollment_date) as seqnum
from p_enrollment e
) e
where seqnum = 1 and phone_model = 'iPhone 3'

Perhaps I'm thinking too simply, but wouldn't either of the following do what you're looking for?:
-- Per-model subscriber counts taken straight off p_enrollment.
-- (Counts reflect each subscriber's CURRENT model, per the problem statement.)
SELECT Phone_Model,
       COUNT(*) AS Initially_Enrolled
FROM p_enrollment
GROUP BY Phone_Model
(working SQLFiddle: http://sqlfiddle.com/#!3/68258/4)
or
-- Count of p_enrollment rows currently showing the iPhone 3.
select count(*) as Initially_Enrolled
from p_enrollment
where Phone_Model = 'iPhone 3'
(working SQLFiddle: http://sqlfiddle.com/#!3/68258/3)
Since you only want initial enrollment, the ENROLLMENT_CHANGES table is irrelevant — but note this only holds if Phone_Model in p_enrollment stores the original enrollment phone. Per the problem statement it holds the phone changed TO for subscribers with change records, in which case ENROLLMENT_CHANGES is still needed to recover the original phone.

Related

Extract words from a column and count frequency

Does anyone know if there's an efficient way to extract all the words from a single column and count the frequency of each word in SQL Server? I only have read-only access to my database so I can't create a self-defined function to do this.
Here's a reproducible example:
-- Fixture for the word-frequency question: three space-separated sentences.
CREATE TABLE words
(
id INT PRIMARY KEY,
-- VARCHAR (not NVARCHAR): DATALENGTH on this column returns the character
-- count, since varchar stores one byte per character.
text_column VARCHAR(1000)
);
INSERT INTO words (id, text_column)
VALUES
(1, 'SQL Server is a popular database management system'),
(2, 'It is widely used for data storage and retrieval'),
(3, 'SQL Server is a powerful tool for data analysis');
I have found this code but it's not working correctly, and I think it's too complicated to understand:
-- Word-frequency over words.text_column: an inline tally (numbers) table
-- locates every space, CROSS APPLY slices out each word, then stop-words
-- are removed and the remainder grouped and counted.
--
-- BUG FIX: the original sized the tally with DATALENGTH(a.text_column)/2.
-- Halving DATALENGTH is only correct for NVARCHAR (2 bytes per character);
-- text_column is VARCHAR, where DATALENGTH equals the character count, so
-- only the first half of each sentence was scanned for spaces and words in
-- the second half were silently dropped -- which matches the under-counted
-- output reported ('data' 1 instead of 2, etc.).
WITH E1(N) AS
(
SELECT 1
FROM (VALUES
(1),(1),(1),(1),(1),(1),(1),(1),(1),(1)
) t(N)
),
E2(N) AS (SELECT 1 FROM E1 a CROSS JOIN E1 b), -- 10 x 10 = 100 rows
E4(N) AS (SELECT 1 FROM E2 a CROSS JOIN E2 b)  -- 100 x 100 = 10,000 rows: max supported string length
SELECT
LOWER(x.Item) AS [Word],
COUNT(*) AS [Counts]
FROM
(SELECT * FROM words) a
CROSS APPLY
(SELECT
ItemNumber = ROW_NUMBER() OVER(ORDER BY l.N1),
Item = LTRIM(RTRIM(SUBSTRING(a.text_column, l.N1, l.L1)))
FROM
(SELECT
s.N1,
-- word length = distance from this start to the next space (4000 = tail of string)
L1 = ISNULL(NULLIF(CHARINDEX(' ',a.text_column,s.N1),0)-s.N1,4000)
FROM
-- word start positions: 1, plus the character after each space
(SELECT 1
UNION ALL
SELECT t.N+1
FROM
-- was: TOP (ISNULL(DATALENGTH(a.text_column)/2,0)) -- see BUG FIX note above
(SELECT TOP (ISNULL(DATALENGTH(a.text_column),0))
ROW_NUMBER() OVER (ORDER BY (SELECT NULL))
FROM E4) t(N)
WHERE SUBSTRING(a.text_column ,t.N,1) = ' '
) s(N1)
) l(N1, L1)
) x
WHERE
x.item <> ''
AND x.Item NOT IN ('0o', '0s', '3a', '3b', '3d', '6b', '6o', 'a', 'a1', 'a2', 'a3', 'a4', 'ab', 'able', 'about', 'above', 'abst', 'ac', 'accordance', 'according', 'accordingly', 'across', 'act', 'actually', 'ad', 'added', 'adj', 'ae', 'af', 'affected', 'affecting', 'affects', 'after', 'afterwards', 'ag', 'again', 'against', 'ah', 'ain', 'ain''t', 'aj', 'al', 'all', 'allow', 'allows', 'almost', 'alone', 'along', 'already', 'also', 'although', 'always', 'am', 'among', 'amongst', 'amoungst', 'amount', 'an', 'and', 'announce', 'another', 'any', 'anybody', 'anyhow', 'anymore', 'anyone', 'anything', 'anyway', 'anyways', 'anywhere', 'ao', 'ap', 'apart', 'apparently', 'appear', 'appreciate', 'appropriate', 'approximately', 'ar', 'are', 'aren', 'arent', 'aren''t', 'arise', 'around', 'as', 'a''s', 'aside', 'ask', 'asking', 'associated', 'at', 'au', 'auth', 'av', 'available', 'aw', 'away', 'awfully', 'ax', 'ay', 'az', 'b', 'b1', 'b2', 'b3', 'ba', 'back', 'bc', 'bd', 'be', 'became', 'because', 'become', 'becomes', 'becoming', 'been', 'before', 'beforehand', 'begin', 'beginning', 'beginnings', 'begins', 'behind', 'being', 'believe', 'below', 'beside', 'besides', 'best', 'better', 'between', 'beyond', 'bi', 'bill', 'biol', 'bj', 'bk', 'bl', 'bn', 'both', 'bottom', 'bp', 'br', 'brief', 'briefly', 'bs', 'bt', 'bu', 'but', 'bx', 'by', 'c', 'c1', 'c2', 'c3', 'ca', 'call', 'came', 'can', 'cannot', 'cant', 'can''t', 'cause', 'causes', 'cc', 'cd', 'ce', 'certain', 'certainly', 'cf', 'cg', 'ch', 'changes', 'ci', 'cit', 'cj', 'cl', 'clearly', 'cm', 'c''mon', 'cn', 'co', 'com', 'come', 'comes', 'con', 'concerning', 'consequently', 'consider', 'considering', 'contain', 'containing', 'contains', 'corresponding', 'could', 'couldn', 'couldnt', 'couldn''t', 'course', 'cp', 'cq', 'cr', 'cry', 'cs', 'c''s', 'ct', 'cu', 'currently', 'cv', 'cx', 'cy', 'cz', 'd', 'd2', 'da', 'date', 'dc', 'dd', 'de', 'definitely', 'describe', 'described', 'despite', 'detail', 'df', 'di', 'did', 'didn', 'didn''t', 
'different', 'dj', 'dk', 'dl', 'do', 'does', 'doesn', 'doesn''t', 'doing', 'don', 'done', 'don''t', 'down', 'downwards', 'dp', 'dr', 'ds', 'dt', 'du', 'due', 'during', 'dx', 'dy', 'e', 'e2', 'e3', 'ea', 'each', 'ec', 'ed', 'edu', 'ee', 'ef', 'effect', 'eg', 'ei', 'eight', 'eighty', 'either', 'ej', 'el', 'eleven', 'else', 'elsewhere', 'em', 'empty', 'en', 'end', 'ending', 'enough', 'entirely', 'eo', 'ep', 'eq', 'er', 'es', 'especially', 'est', 'et', 'et-al', 'etc', 'eu', 'ev', 'even', 'ever', 'every', 'everybody', 'everyone', 'everything', 'everywhere', 'ex', 'exactly', 'example', 'except', 'ey', 'f', 'f2', 'fa', 'far', 'fc', 'few', 'ff', 'fi', 'fifteen', 'fifth', 'fify', 'fill', 'find', 'fire', 'first', 'five', 'fix', 'fj', 'fl', 'fn', 'fo', 'followed', 'following', 'follows', 'for', 'former', 'formerly', 'forth', 'forty', 'found', 'four', 'fr', 'from', 'front', 'fs', 'ft', 'fu', 'full', 'further', 'furthermore', 'fy', 'g', 'ga', 'gave', 'ge', 'get', 'gets', 'getting', 'gi', 'give', 'given', 'gives', 'giving', 'gj', 'gl', 'go', 'goes', 'going', 'gone', 'got', 'gotten', 'gr', 'greetings', 'gs', 'gy', 'h', 'h2', 'h3', 'had', 'hadn', 'hadn''t', 'happens', 'hardly', 'has', 'hasn', 'hasnt', 'hasn''t', 'have', 'haven', 'haven''t', 'having', 'he', 'hed', 'he''d', 'he''ll', 'hello', 'help', 'hence', 'her', 'here', 'hereafter', 'hereby', 'herein', 'heres', 'here''s', 'hereupon', 'hers', 'herself', 'hes', 'he''s', 'hh', 'hi', 'hid', 'him', 'himself', 'his', 'hither', 'hj', 'ho', 'home', 'hopefully', 'how', 'howbeit', 'however', 'how''s', 'hr', 'hs', 'http', 'hu', 'hundred', 'hy', 'i', 'i2', 'i3', 'i4', 'i6', 'i7', 'i8', 'ia', 'ib', 'ibid', 'ic', 'id', 'i''d', 'ie', 'if', 'ig', 'ignored', 'ih', 'ii', 'ij', 'il', 'i''ll', 'im', 'i''m', 'immediate', 'immediately', 'importance', 'important', 'in', 'inasmuch', 'inc', 'indeed', 'index', 'indicate', 'indicated', 'indicates', 'information', 'inner', 'insofar', 'instead', 'interest', 'into', 'invention', 'inward', 'io', 'ip', 'iq', 
'ir', 'is', 'isn', 'isn''t', 'it', 'itd', 'it''d', 'it''ll', 'its', 'it''s', 'itself', 'iv', 'i''ve', 'ix', 'iy', 'iz', 'j', 'jj', 'jr', 'js', 'jt', 'ju', 'just', 'k', 'ke', 'keep', 'keeps', 'kept', 'kg', 'kj', 'km', 'know', 'known', 'knows', 'ko', 'l', 'l2', 'la', 'largely', 'last', 'lately', 'later', 'latter', 'latterly', 'lb', 'lc', 'le', 'least', 'les', 'less', 'lest', 'let', 'lets', 'let''s', 'lf', 'like', 'liked', 'likely', 'line', 'little', 'lj', 'll', 'll', 'ln', 'lo', 'look', 'looking', 'looks', 'los', 'lr', 'ls', 'lt', 'ltd', 'm', 'm2', 'ma', 'made', 'mainly', 'make', 'makes', 'many', 'may', 'maybe', 'me', 'mean', 'means', 'meantime', 'meanwhile', 'merely', 'mg', 'might', 'mightn', 'mightn''t', 'mill', 'million', 'mine', 'miss', 'ml', 'mn', 'mo', 'more', 'moreover', 'most', 'mostly', 'move', 'mr', 'mrs', 'ms', 'mt', 'mu', 'much', 'mug', 'must', 'mustn', 'mustn''t', 'my', 'myself', 'n', 'n2', 'na', 'name', 'namely', 'nay', 'nc', 'nd', 'ne', 'near', 'nearly', 'necessarily', 'necessary', 'need', 'needn', 'needn''t', 'needs', 'neither', 'never', 'nevertheless', 'new', 'next', 'ng', 'ni', 'nine', 'ninety', 'nj', 'nl', 'nn', 'no', 'nobody', 'non', 'none', 'nonetheless', 'noone', 'nor', 'normally', 'nos', 'not', 'noted', 'nothing', 'novel', 'now', 'nowhere', 'nr', 'ns', 'nt', 'ny', 'o', 'oa', 'ob', 'obtain', 'obtained', 'obviously', 'oc', 'od', 'of', 'off', 'often', 'og', 'oh', 'oi', 'oj', 'ok', 'okay', 'ol', 'old', 'om', 'omitted', 'on', 'once', 'one', 'ones', 'only', 'onto', 'oo', 'op', 'oq', 'or', 'ord', 'os', 'ot', 'other', 'others', 'otherwise', 'ou', 'ought', 'our', 'ours', 'ourselves', 'out', 'outside', 'over', 'overall', 'ow', 'owing', 'own', 'ox', 'oz', 'p', 'p1', 'p2', 'p3', 'page', 'pagecount', 'pages', 'par', 'part', 'particular', 'particularly', 'pas', 'past', 'pc', 'pd', 'pe', 'per', 'perhaps', 'pf', 'ph', 'pi', 'pj', 'pk', 'pl', 'placed', 'please', 'plus', 'pm', 'pn', 'po', 'poorly', 'possible', 'possibly', 'potentially', 'pp', 'pq', 'pr', 
'predominantly', 'present', 'presumably', 'previously', 'primarily', 'probably', 'promptly', 'proud', 'provides', 'ps', 'pt', 'pu', 'put', 'py', 'q', 'qj', 'qu', 'que', 'quickly', 'quite', 'qv', 'r', 'r2', 'ra', 'ran', 'rather', 'rc', 'rd', 're', 'readily', 'really', 'reasonably', 'recent', 'recently', 'ref', 'refs', 'regarding', 'regardless', 'regards', 'related', 'relatively', 'research', 'research-articl', 'respectively', 'resulted', 'resulting', 'results', 'rf', 'rh', 'ri', 'right', 'rj', 'rl', 'rm', 'rn', 'ro', 'rq', 'rr', 'rs', 'rt', 'ru', 'run', 'rv', 'ry', 's', 's2', 'sa', 'said', 'same', 'saw', 'say', 'saying', 'says', 'sc', 'sd', 'se', 'sec', 'second', 'secondly', 'section', 'see', 'seeing', 'seem', 'seemed', 'seeming', 'seems', 'seen', 'self', 'selves', 'sensible', 'sent', 'serious', 'seriously', 'seven', 'several', 'sf', 'shall', 'shan', 'shan''t', 'she', 'shed', 'she''d', 'she''ll', 'shes', 'she''s', 'should', 'shouldn', 'shouldn''t', 'should''ve', 'show', 'showed', 'shown', 'showns', 'shows', 'si', 'side', 'significant', 'significantly', 'similar', 'similarly', 'since', 'sincere', 'six', 'sixty', 'sj', 'sl', 'slightly', 'sm', 'sn', 'so', 'some', 'somebody', 'somehow', 'someone', 'somethan', 'something', 'sometime', 'sometimes', 'somewhat', 'somewhere', 'soon', 'sorry', 'sp', 'specifically', 'specified', 'specify', 'specifying', 'sq', 'sr', 'ss', 'st', 'still', 'stop', 'strongly', 'sub', 'substantially', 'successfully', 'such', 'sufficiently', 'suggest', 'sup', 'sure', 'sy', 'system', 'sz', 't', 't1', 't2', 't3', 'take', 'taken', 'taking', 'tb', 'tc', 'td', 'te', 'tell', 'ten', 'tends', 'tf', 'th', 'than', 'thank', 'thanks', 'thanx', 'that', 'that''ll', 'thats', 'that''s', 'that''ve', 'the', 'their', 'theirs', 'them', 'themselves', 'then', 'thence', 'there', 'thereafter', 'thereby', 'thered', 'therefore', 'therein', 'there''ll', 'thereof', 'therere', 'theres', 'there''s', 'thereto', 'thereupon', 'there''ve', 'these', 'they', 'theyd', 'they''d', 
'they''ll', 'theyre', 'they''re', 'they''ve', 'thickv', 'thin', 'think', 'third', 'this', 'thorough', 'thoroughly', 'those', 'thou', 'though', 'thoughh', 'thousand', 'three', 'throug', 'through', 'throughout', 'thru', 'thus', 'ti', 'til', 'tip', 'tj', 'tl', 'tm', 'tn', 'to', 'together', 'too', 'took', 'top', 'toward', 'towards', 'tp', 'tq', 'tr', 'tried', 'tries', 'truly', 'try', 'trying', 'ts', 't''s', 'tt', 'tv', 'twelve', 'twenty', 'twice', 'two', 'tx', 'u', 'u201d', 'ue', 'ui', 'uj', 'uk', 'um', 'un', 'under', 'unfortunately', 'unless', 'unlike', 'unlikely', 'until', 'unto', 'uo', 'up', 'upon', 'ups', 'ur', 'us', 'use', 'used', 'useful', 'usefully', 'usefulness', 'uses', 'using', 'usually', 'ut', 'v', 'va', 'value', 'various', 'vd', 've', 've', 'very', 'via', 'viz', 'vj', 'vo', 'vol', 'vols', 'volumtype', 'vq', 'vs', 'vt', 'vu', 'w', 'wa', 'want', 'wants', 'was', 'wasn', 'wasnt', 'wasn''t', 'way', 'we', 'wed', 'we''d', 'welcome', 'well', 'we''ll', 'well-b', 'went', 'were', 'we''re', 'weren', 'werent', 'weren''t', 'we''ve', 'what', 'whatever', 'what''ll', 'whats', 'what''s', 'when', 'whence', 'whenever', 'when''s', 'where', 'whereafter', 'whereas', 'whereby', 'wherein', 'wheres', 'where''s', 'whereupon', 'wherever', 'whether', 'which', 'while', 'whim', 'whither', 'who', 'whod', 'whoever', 'whole', 'who''ll', 'whom', 'whomever', 'whos', 'who''s', 'whose', 'why', 'why''s', 'wi', 'widely', 'will', 'willing', 'wish', 'with', 'within', 'without', 'wo', 'won', 'wonder', 'wont', 'won''t', 'words', 'world', 'would', 'wouldn', 'wouldnt', 'wouldn''t', 'www', 'x', 'x1', 'x2', 'x3', 'xf', 'xi', 'xj', 'xk', 'xl', 'xn', 'xo', 'xs', 'xt', 'xv', 'xx', 'y', 'y2', 'yes', 'yet', 'yj', 'yl', 'you', 'youd', 'you''d', 'you''ll', 'your', 'youre', 'you''re', 'yours', 'yourself', 'yourselves', 'you''ve', 'yr', 'ys', 'yt', 'z', 'zero', 'zi', 'zz')
GROUP BY x.Item
ORDER BY COUNT(*) DESC
Here's the result of the above code, as you can see it's not counting correctly:
Word Counts
server 2
sql 2
data 1
database 1
popular 1
powerful 1
Can anyone help on this? Would be really appreciated!
You can make use of String_split here, such as
-- Split each row on single spaces with STRING_SPLIT, drop stop-words, count.
-- NOTE(review): "(exclude list)" is a placeholder -- substitute the literal
-- stop-word list before running. STRING_SPLIT requires SQL Server 2016+
-- (database compatibility level 130 or higher).
select value Word, Count(*) Counts
from words
cross apply String_Split(text_column, ' ')
where value not in(exclude list)
group by value
order by counts desc;
You should use the STRING_SPLIT function -- like this
-- One row per (id, word): split each sentence into its words.
-- BUG FIX: the sample sentences are space-separated, so the delimiter must
-- be ' '; the original split on ',' and returned each whole sentence as a
-- single value.
SELECT id, value as aword
FROM words
CROSS APPLY STRING_SPLIT(text_column, ' ');
This will create a table with all the words by id -- to get the count do this:
-- Word-frequency counts over the split rows.
-- BUG FIXES: (1) split on ' ' -- the sample text is space-separated, and the
-- original ',' delimiter returned whole sentences as single values;
-- (2) removed the stray ';' that terminated the statement inside the derived
-- table and made the query a syntax error.
SELECT aword, count(*) as counts
FROM (
SELECT id, value as aword
FROM words
CROSS APPLY STRING_SPLIT(text_column, ' ')
) x
GROUP BY aword
You may need to apply LOWER(text_column) if you want the counts to be case-insensitive.
If you don't have access to STRING_SPLIT function, you can use weird xml trick to convert space to a word node and then shred it with nodes function:
-- XML-shred splitter (works before SQL Server 2016 / STRING_SPLIT):
-- turn 'a b c' into '<x>a</x><x>b</x><x>c</x>', cast to XML, and emit one
-- row per <x> node, then group and count the words.
-- CAVEAT (acknowledged by the author below): the CAST throws on characters
-- that are invalid in XML text, e.g. '<' or '&' inside the data.
select word, COUNT(*)
from (
select n.value('.', 'nvarchar(50)') AS word
from (
VALUES
(1, 'SQL Server is a popular database management system'),
(2, 'It is widely used for data storage and retrieval'),
(3, 'SQL Server is a powerful tool for data analysis')
) AS t (id, txt)
CROSS APPLY (
SELECT CAST('<x>' + REPLACE(txt, ' ', '</x><x>') + '</x>' AS XML) x
) x
CROSS APPLY x.nodes('x') z(n)
) w
GROUP BY word
Of course, this will fail on "bad" words and invalid xml-characters but it can be worked on. Text processing has never been SQL Server's strong-point though, so probably better to use some NLP library to do this kind of stuff

Preparing bollinger bands with postgresql - STDDEV and PARTITION BY

I'm working on a pet project on cryptocurrency and Bollinger Bands, and I'm stuck on a problem I'm not able to solve.
Given this table:
-- One OHLCV candle per (From_symbol, To_symbol, Timestamp).
CREATE TABLE public.dataset
(
"From_symbol" character varying(10) COLLATE pg_catalog."default" NOT NULL,
"To_symbol" character varying(10) COLLATE pg_catalog."default" NOT NULL,
"Timestamp" timestamp without time zone NOT NULL,
"Open" numeric(18,9),
"High" numeric(18,9),
"Low" numeric(18,9),
"Close" numeric(18,9),
"Volume_From" numeric(18,9),
"Volume_To" numeric(18,9),
"Weighted_Price" numeric(18,9),
-- NOTE(review): requires the sequence dataset_id_seq to exist beforehand;
-- "Id" integer GENERATED ALWAYS AS IDENTITY would avoid that dependency.
"Id" integer NOT NULL DEFAULT nextval('dataset_id_seq'::regclass),
CONSTRAINT dataset_pkey PRIMARY KEY ("From_symbol", "To_symbol", "Timestamp")
); -- BUG FIX: the original statement was missing this closing parenthesis/terminator
If I run the following query
-- Attempted 20-period SMA plus upper/lower Bollinger bands.
-- NOTE(review): PARTITION BY "Id" is the root cause of the NULL bands --
-- "Id" is unique per row (sequence default), so every window frame contains
-- exactly one row; AVG of one row is that row, and STDDEV_SAMP of a single
-- value is undefined (NULL), which propagates into both band expressions.
-- Partition by the currency pair and ORDER BY "Timestamp" instead, as the
-- answers below describe.
SELECT "From_symbol",
"To_symbol",
"Timestamp",
"Open",
"High",
"Low",
"Close",
"Volume_From",
"Volume_To",
"Weighted_Price",
AVG("Close") OVER
(PARTITION BY "Id"
ORDER BY "Id"
ROWS BETWEEN 19 PRECEDING AND CURRENT ROW) AS SMA20,
AVG("Close") OVER
(PARTITION BY "Id"
ORDER BY "Id"
ROWS BETWEEN 19 PRECEDING AND CURRENT ROW) +
STDDEV_SAMP("Close") OVER
(PARTITION BY "Id"
ORDER BY "Id"
ROWS BETWEEN 19 PRECEDING AND CURRENT ROW) * 2 AS "Upper_Bollinger_Band",
AVG("Close") OVER
(PARTITION BY "Id"
ORDER BY "Id"
ROWS BETWEEN 19 PRECEDING AND CURRENT ROW) -
STDDEV_SAMP("Close") OVER
(PARTITION BY "Id"
ORDER BY "Id"
ROWS BETWEEN 19 PRECEDING AND CURRENT ROW) * 2 AS "Lower_Bollinger_Band"
FROM public.dataset;
I get a null result on both the upper and lower bollinger bands.
While I have a very large dataset (2012-2020), I provide you with a sample of 40 lines. This should be enough in case you wish to test it.
-- Sample BTC/USD candles (Ids 1-57) loaded with a single multi-row insert;
-- tuples are identical to the original row-by-row statements.
INSERT INTO public.dataset VALUES
('BTC', 'USD', '2011-12-31 08:52:00', 4.390000000, 4.390000000, 4.390000000, 4.390000000, 0.455580870, 2.000000019, 4.390000000, 1),
('BTC', 'USD', '2011-12-31 16:50:00', 4.390000000, 4.390000000, 4.390000000, 4.390000000, 48.000000000, 210.720000000, 4.390000000, 2),
('BTC', 'USD', '2011-12-31 17:59:00', 4.500000000, 4.570000000, 4.500000000, 4.570000000, 37.862297230, 171.380337530, 4.526411498, 3),
('BTC', 'USD', '2011-12-31 18:00:00', 4.580000000, 4.580000000, 4.580000000, 4.580000000, 9.000000000, 41.220000000, 4.580000000, 4),
('BTC', 'USD', '2012-01-01 05:16:00', 4.580000000, 4.580000000, 4.580000000, 4.580000000, 1.502000000, 6.879160000, 4.580000000, 5),
('BTC', 'USD', '2012-01-01 16:28:00', 4.840000000, 4.840000000, 4.840000000, 4.840000000, 10.000000000, 48.400000000, 4.840000000, 6),
('BTC', 'USD', '2012-01-01 23:45:00', 5.000000000, 5.000000000, 5.000000000, 5.000000000, 10.100000000, 50.500000000, 5.000000000, 7),
('BTC', 'USD', '2012-01-02 21:04:00', 5.000000000, 5.000000000, 5.000000000, 5.000000000, 19.048000000, 95.240000000, 5.000000000, 8),
('BTC', 'USD', '2012-01-03 12:45:00', 5.320000000, 5.320000000, 5.320000000, 5.320000000, 2.419172930, 12.869999988, 5.320000000, 9),
('BTC', 'USD', '2012-01-03 15:22:00', 5.140000000, 5.140000000, 5.140000000, 5.140000000, 0.680000000, 3.495200000, 5.140000000, 10),
('BTC', 'USD', '2012-01-03 15:54:00', 5.260000000, 5.260000000, 5.260000000, 5.260000000, 29.319391630, 154.219999970, 5.260000000, 11),
('BTC', 'USD', '2012-01-03 16:32:00', 5.290000000, 5.290000000, 5.290000000, 5.290000000, 29.302457470, 155.010000020, 5.290000000, 12),
('BTC', 'USD', '2012-01-03 18:10:00', 5.290000000, 5.290000000, 5.290000000, 5.290000000, 11.285444230, 59.699999977, 5.290000000, 13),
('BTC', 'USD', '2012-01-03 18:14:00', 5.140000000, 5.140000000, 5.140000000, 5.140000000, 0.020000000, 0.102800000, 5.140000000, 14),
('BTC', 'USD', '2012-01-03 18:26:00', 5.290000000, 5.290000000, 5.290000000, 5.290000000, 11.000000000, 58.190000000, 5.290000000, 15),
('BTC', 'USD', '2012-01-03 18:27:00', 5.290000000, 5.290000000, 5.290000000, 5.290000000, 4.010814660, 21.217209551, 5.290000000, 16),
('BTC', 'USD', '2012-01-04 05:17:00', 4.930000000, 4.930000000, 4.930000000, 4.930000000, 2.320000000, 11.437600000, 4.930000000, 17),
('BTC', 'USD', '2012-01-04 06:05:00', 4.930000000, 4.930000000, 4.930000000, 4.930000000, 9.680000000, 47.722400000, 4.930000000, 18),
('BTC', 'USD', '2012-01-04 13:41:00', 5.190000000, 5.190000000, 5.190000000, 5.190000000, 2.641618500, 13.710000015, 5.190000000, 19),
('BTC', 'USD', '2012-01-04 13:57:00', 5.190000000, 5.190000000, 5.190000000, 5.190000000, 8.724470130, 45.279999975, 5.190000000, 20),
('BTC', 'USD', '2012-01-04 16:39:00', 5.190000000, 5.190000000, 5.190000000, 5.190000000, 16.344726030, 84.829128096, 5.190000000, 21),
('BTC', 'USD', '2012-01-04 16:53:00', 5.320000000, 5.320000000, 5.320000000, 5.320000000, 0.186090230, 0.990000024, 5.320000000, 22),
('BTC', 'USD', '2012-01-04 16:54:00', 5.320000000, 5.320000000, 5.320000000, 5.320000000, 10.394736840, 55.299999989, 5.320000000, 23),
('BTC', 'USD', '2012-01-04 17:00:00', 5.360000000, 5.370000000, 5.360000000, 5.370000000, 13.629422720, 73.060000006, 5.360461812, 24),
('BTC', 'USD', '2012-01-04 18:51:00', 5.370000000, 5.570000000, 5.370000000, 5.570000000, 43.312195780, 235.747069370, 5.442972011, 25),
('BTC', 'USD', '2012-01-05 02:40:00', 5.720000000, 5.720000000, 5.720000000, 5.720000000, 5.000000000, 28.600000000, 5.720000000, 26),
('BTC', 'USD', '2012-01-05 04:52:00', 5.750000000, 5.750000000, 5.750000000, 5.750000000, 5.200000000, 29.900000000, 5.750000000, 27),
('BTC', 'USD', '2012-01-05 08:19:00', 5.750000000, 5.790000000, 5.750000000, 5.790000000, 14.800000000, 85.500000000, 5.777027027, 28),
('BTC', 'USD', '2012-01-05 09:58:00', 6.000000000, 6.000000000, 6.000000000, 6.000000000, 2.236666670, 13.420000020, 6.000000000, 29),
('BTC', 'USD', '2012-01-05 10:03:00', 6.000000000, 6.000000000, 6.000000000, 6.000000000, 0.168482700, 1.010896200, 6.000000000, 30),
('BTC', 'USD', '2012-01-05 10:48:00', 6.150000000, 6.150000000, 6.150000000, 6.150000000, 10.000000000, 61.500000000, 6.150000000, 31),
('BTC', 'USD', '2012-01-05 11:08:00', 6.190000000, 6.190000000, 6.190000000, 6.190000000, 0.571890150, 3.540000029, 6.190000000, 32),
('BTC', 'USD', '2012-01-05 11:10:00', 6.190000000, 6.230000000, 6.190000000, 6.230000000, 16.000000000, 99.285718902, 6.205357431, 33),
('BTC', 'USD', '2012-01-05 11:48:00', 6.230000000, 6.250000000, 6.230000000, 6.250000000, 14.000000000, 87.420000000, 6.244285714, 34),
('BTC', 'USD', '2012-01-05 12:20:00', 6.460000000, 6.460000000, 6.460000000, 6.460000000, 0.773993810, 5.000000013, 6.460000000, 35),
('BTC', 'USD', '2012-01-05 12:21:00', 6.460000000, 6.460000000, 6.460000000, 6.460000000, 0.178018570, 1.149999962, 6.460000000, 36),
('BTC', 'USD', '2012-01-05 12:28:00', 6.430000000, 6.430000000, 6.430000000, 6.430000000, 0.311041990, 1.999999996, 6.430000000, 37),
('BTC', 'USD', '2012-01-05 14:07:00', 6.440000000, 6.440000000, 6.440000000, 6.440000000, 0.310559010, 2.000000024, 6.440000000, 38),
('BTC', 'USD', '2012-01-05 14:38:00', 6.430000000, 6.430000000, 6.430000000, 6.430000000, 0.466562990, 3.000000026, 6.430000000, 39),
('BTC', 'USD', '2012-01-05 15:31:00', 6.420000000, 6.420000000, 6.420000000, 6.420000000, 0.311526480, 2.000000002, 6.420000000, 40),
('BTC', 'USD', '2012-01-05 23:50:00', 6.430000000, 6.430000000, 6.430000000, 6.430000000, 0.311526480, 2.003115266, 6.430000000, 41),
('BTC', 'USD', '2012-01-06 00:35:00', 6.440000000, 6.440000000, 6.440000000, 6.440000000, 0.466562990, 3.004665656, 6.440000000, 42),
('BTC', 'USD', '2012-01-06 00:39:00', 6.470000000, 6.470000000, 6.470000000, 6.470000000, 0.952012380, 6.159520099, 6.470000000, 43),
('BTC', 'USD', '2012-01-06 00:41:00', 6.650000000, 6.650000000, 6.650000000, 6.650000000, 20.777443610, 138.170000010, 6.650000000, 44),
('BTC', 'USD', '2012-01-06 00:43:00', 6.650000000, 6.650000000, 6.650000000, 6.650000000, 1.466275650, 9.750733073, 6.650000000, 45),
('BTC', 'USD', '2012-01-06 00:46:00', 6.650000000, 6.650000000, 6.650000000, 6.650000000, 0.499265780, 3.320117437, 6.650000000, 46),
('BTC', 'USD', '2012-01-06 07:02:00', 6.650000000, 6.650000000, 6.650000000, 6.650000000, 1.425497660, 9.479559439, 6.650000000, 47),
('BTC', 'USD', '2012-01-06 07:04:00', 6.690000000, 6.730000000, 6.690000000, 6.730000000, 6.310000000, 42.363858320, 6.713765186, 48),
('BTC', 'USD', '2012-01-06 14:20:00', 6.800000000, 6.900000000, 6.800000000, 6.900000000, 9.310559010, 63.611801268, 6.832221481, 49),
('BTC', 'USD', '2012-01-06 16:21:00', 6.760000000, 6.760000000, 6.760000000, 6.760000000, 0.295857990, 2.000000012, 6.760000000, 50),
('BTC', 'USD', '2012-01-06 16:36:00', 6.500000000, 6.500000000, 6.500000000, 6.500000000, 0.500000000, 3.250000000, 6.500000000, 51),
('BTC', 'USD', '2012-01-06 16:37:00', 6.490000000, 6.490000000, 6.490000000, 6.490000000, 1.540832050, 10.000000005, 6.490000000, 52),
('BTC', 'USD', '2012-01-06 17:37:00', 6.400000000, 6.400000000, 6.400000000, 6.400000000, 0.500000000, 3.200000000, 6.400000000, 53),
('BTC', 'USD', '2012-01-06 18:59:00', 6.400000000, 6.400000000, 6.400000000, 6.400000000, 1.550387590, 9.922480576, 6.400000000, 54),
('BTC', 'USD', '2012-01-06 19:00:00', 6.400000000, 6.400000000, 6.400000000, 6.400000000, 0.838759680, 5.368061952, 6.400000000, 55),
('BTC', 'USD', '2012-01-06 19:42:00', 6.400000000, 6.400000000, 6.400000000, 6.400000000, 9.110852730, 58.309457472, 6.400000000, 56),
('BTC', 'USD', '2012-01-06 21:18:00', 6.300000000, 6.300000000, 6.300000000, 6.300000000, 0.500000000, 3.150000000, 6.300000000, 57);
Would you be so kind as to let me understand what am I doing wrong? I traced the problem to the STDDEV usage, but I have no clue on why the PARTITION BY clause works on AVG and fails on the STDDEV.
I'm running PostgreSQL 12.2 on Ubuntu:
PostgreSQL 12.2 (Ubuntu 12.2-4) on x86_64-pc-linux-gnu, compiled by gcc (Ubuntu 9.3.0-8ubuntu1) 9.3.0, 64-bit
Thanks!
The reason is this logic;
AVG("Close") OVER
(PARTITION BY "Id"
------------------^
ORDER BY "Id"
ROWS BETWEEN 19 PRECEDING AND CURRENT ROW
) AS SMA20,
Your id is a unique value on each row, so the partition has only one row -- and the standard deviation of a single value is not defined.
Presumably, you intend:
-- Suggested replacement: partition by the currency pair and order by the
-- event time, so each 20-row frame spans consecutive bars of the same pair.
AVG(close) OVER
(PARTITION BY from_symbol, to_symbol
ORDER BY timestamp
ROWS BETWEEN 19 PRECEDING AND CURRENT ROW
) AS SMA20,
Notes:
Do not enclose identifiers in double quotes. That just makes it harder to write queries.
Why are you using a sequence when you can simply use generated always as identity?
Use the timestamp for ordering rather than the id.
Have you checked your results ? do the averages look right to you ?
I say this because your Id is unique and if you PARTITION BY on it, you will get 1-row partitions. While you can average 1 row, you cannot compute the standard deviation for one single row.
My suggestion would be to remove the PARTITION BY "Id" from all your aggregate functions. It seems you want to use the whole table as 1 partition anyway, or find the right column to partition by. A good candidate might be the From_symbol,To_symbol pair as you do not want to mix exchange pairs. So my suggestion would be to PARTITION BY "From_symbol","To_symbol" but you know the data best.

Recursive Matching using CTE Query in SQL Server

I have two tables (they are defined below and you can use the SQL below to build them)
-- Drop and recreate the sample ETab ("episode/enrollment") table so the
-- script is re-runnable. OBJECT_ID is schema-qualified, unlike the original
-- unfiltered INFORMATION_SCHEMA lookup, which could match a same-named table
-- in a different schema.
IF OBJECT_ID(N'dbo.ETab', N'U') IS NOT NULL
DROP TABLE dbo.ETab;
GO
CREATE TABLE ETab
([MRN] varchar(20), [LSPEC] varchar(2), [ADT] DATETIME, [SDT] DATETIME, [Source] varchar(20), [Enum] varchar(20));
GO
-- Sample ETab rows: monthly HOMECARE billing episodes (ADT..SDT span a whole
-- month) plus single-day IP/OP encounters for MRN 111824.
INSERT INTO ETab ([MRN], [LSPEC], [ADT], [SDT], [Source], [Enum])
VALUES
('HOMECARE', 'HM', CONVERT(datetime, '2017-04-01 00:00:00.000', 20), CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'PRODPAT', 'HOMEBLD04'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-05-01 00:00:00.000', 20), CONVERT(datetime, '2017-05-31 00:00:00.000', 20), 'PRODPAT', 'HOMEBLD05'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-06-01 00:00:00.000', 20), CONVERT(datetime, '2017-06-30 00:00:00.000', 20), 'PRODPAT', 'HOMEBLD06'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-07-01 00:00:00.000', 20), CONVERT(datetime, '2017-07-31 00:00:00.000', 20), 'PRODPAT', 'HOMEBLD07'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-08-01 00:00:00.000', 20), CONVERT(datetime, '2017-08-31 00:00:00.000', 20), 'PRODPAT', 'HOMEBLD08'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-09-01 00:00:00.000', 20), CONVERT(datetime, '2017-09-30 00:00:00.000', 20), 'PRODPAT', 'HOMEBLD09'),
('HOMECARE', 'HQ', CONVERT(datetime, '2017-04-01 00:00:00.000', 20), CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'PRODPAT', 'HOMEDRG04HM'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-05-01 00:00:00.000', 20), CONVERT(datetime, '2017-05-31 00:00:00.000', 20), 'PRODPAT', 'HOMEDRG05HM'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-06-01 00:00:00.000', 20), CONVERT(datetime, '2017-06-30 00:00:00.000', 20), 'PRODPAT', 'HOMEDRG06HM'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-07-01 00:00:00.000', 20), CONVERT(datetime, '2017-07-31 00:00:00.000', 20), 'PRODPAT', 'HOMEDRG07HM'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-08-01 00:00:00.000', 20), CONVERT(datetime, '2017-08-31 00:00:00.000', 20), 'PRODPAT', 'HOMEDRG08HM'),
('HOMECARE', 'HM', CONVERT(datetime, '2017-09-01 00:00:00.000', 20), CONVERT(datetime, '2017-09-30 00:00:00.000', 20), 'PRODPAT', 'HOMEDRG09HM'),
('111824', 'UR', CONVERT(datetime, '2017-09-22 00:00:00.000', 20), CONVERT(datetime, '2017-09-22 00:00:00.000', 20), 'OP', 'OP1118240003'),
('111824', 'NL', CONVERT(datetime, '2017-04-19 00:00:00.000', 20), CONVERT(datetime, '2017-04-19 00:00:00.000', 20), 'OP', 'OP1118240001'),
('111824', 'MS', CONVERT(datetime, '2017-06-30 00:00:00.000', 20), CONVERT(datetime, '2017-06-30 00:00:00.000', 20), 'OP', 'OP1118240002'),
('111824', 'MS', CONVERT(datetime, '2017-04-24 00:00:00.000', 20), CONVERT(datetime, '2017-04-24 00:00:00.000', 20), 'IP', 'IP1118240001'),
('111824', 'MS', CONVERT(datetime, '2017-04-28 00:00:00.000', 20), CONVERT(datetime, '2017-04-28 00:00:00.000', 20), 'IP', 'IP1118240005'),
('111824', 'MS', CONVERT(datetime, '2017-04-27 00:00:00.000', 20), CONVERT(datetime, '2017-04-27 00:00:00.000', 20), 'IP', 'IP1118240004'),
('111824', 'MS', CONVERT(datetime, '2017-04-26 00:00:00.000', 20), CONVERT(datetime, '2017-04-26 00:00:00.000', 20), 'IP', 'IP1118240003'),
('111824', 'MS', CONVERT(datetime, '2017-04-25 00:00:00.000', 20), CONVERT(datetime, '2017-04-25 00:00:00.000', 20), 'IP', 'IP1118240002');
GO
-- Drop and recreate the sample UTab ("usage/utilization") table so the
-- script is re-runnable. OBJECT_ID is schema-qualified, unlike the original
-- unfiltered INFORMATION_SCHEMA lookup, which could match a same-named table
-- in a different schema.
IF OBJECT_ID(N'dbo.UTab', N'U') IS NOT NULL
DROP TABLE dbo.UTab;
GO
CREATE TABLE UTab
(MRN varchar(20), SIDate DATETIME, LSPEC varchar(2), Source varchar(20), Enum varchar(20), Iteration varchar(20));
GO
-- Sample UTab rows: unmatched service events. Enum and Iteration start NULL
-- and are filled in by the generated matching SQL further below.
INSERT INTO UTab
(MRN, SIDate, LSPEC, Source, Enum, Iteration)
VALUES
('HOMECARE', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-17 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'HQ', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-04-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-01 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-01 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-01 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-01 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-01 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-01 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-01 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-06-26 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-30 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('HOMECARE', CONVERT(datetime, '2017-05-04 00:00:00.000', 20), 'HM', 'N', NULL, NULL),
('111824', CONVERT(datetime, '2017-04-21 00:00:00.000', 20), 'MS', 'IP', NULL, NULL),
('111824', CONVERT(datetime, '2017-04-24 00:00:00.000', 20), 'NL', 'OP', NULL, NULL),
('111824', CONVERT(datetime, '2017-04-27 00:00:00.000', 20), 'NL', 'OP', NULL, NULL),
('111824', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'NL', 'OP', NULL, NULL),
('111824', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'NL', 'OP', NULL, NULL),
('111824', CONVERT(datetime, '2017-04-20 00:00:00.000', 20), 'NL', 'OP', NULL, NULL);
GO
-- Quick sanity checks on the fixture data (SELECT * is fine for ad-hoc
-- inspection, though production queries should name their columns).
SELECT * FROM ETab
WHERE Source = 'PRODPAT' AND LSPEC = 'HM'
GO
SELECT * FROM UTab
WHERE LSPEC = 'HM';
GO
-- Drop the scratch table from a previous run, if any. OBJECT_ID is
-- schema-qualified, unlike the original unfiltered INFORMATION_SCHEMA check.
IF OBJECT_ID(N'dbo.__Tmp', N'U') IS NOT NULL
DROP TABLE dbo.__Tmp;
GO
I have a C# application in which the user requests certain matching clauses at run-time. Using the information specified by the user, I generate some SQL that is executed and updates UTab.
Via the generated SQL, I wish to perform some matching which updates the [UTab].[Enum] based on the link field [MRN] and whether [UTab].[SIDate] lies within [ETab].[ADT] and [ETab].[SDT] +- some days before or after which are specified by the user. So, the user might specify two commands, the first:
MatchCmd:MRN,LSPEC:ETab:UTab:ADT:SDT:SIDate:0:1:'Iteration#1':WHERE [UTab].[Source] = 'OP' AND [UTab].[LSPEC] = [ETab].[LSPEC]
This says, match on MRN AND LSPEC from source table ETab, updating UTab where SIDate > ADT - 0 day(s) and SIDate < SDT + 1 day(s) and where [UTab].[Source] = 'OP' - any matches I mark with the "iteration number" Iteration#1.
My code generates the following SQL:
-- Generated matching query: for each UTab row, collect candidate ETab rows
-- whose [ADT - 0 day, SDT + 1 day] window contains SIDate, ranking candidates
-- by closeness of ADT to SIDate (Idx = 1 is the best match).
;WITH cte AS (
SELECT [ETab].[Enum] AS Enum,
[ETab].[MRN] AS Link,
[ETab].[ADT] AS ADT,
[ETab].[SDT] AS SDT,
[UTab].[SIDate] AS DT,
[ETab].[MRN] AS [MRN],
[ETab].[LSPEC] AS [LSPEC],
[ETab].[Source] AS [Source],
ROW_NUMBER() OVER (PARTITION BY [UTab].[MRN], [UTab].[LSPEC], [UTab].[SIDate]
ORDER BY ABS(DATEDIFF(mi, [UTab].[SIDate], [ETab].[ADT]))) AS Idx,
ABS(DATEDIFF(mi, [UTab].[SIDate], [ETab].[ADT])) AS Diff
FROM [UTab]
-- NOTE(review): the WHERE clause below filters on ETab columns, which turns
-- this LEFT JOIN into an effective INNER JOIN — confirm whether unmatched
-- UTab rows are actually wanted in __Tmp.
LEFT JOIN [ETab] ON [UTab].[MRN] = [ETab].[MRN]
-- NOTE(review): the closing parenthesis groups "[Iteration] IS NULL" with
-- the BETWEEN predicate; parsing is still BETWEEN..AND followed by a
-- separate AND, but the placement is confusing to read.
WHERE ([UTab].[SIDate] BETWEEN
DATEADD(dd, -0, [ETab].[ADT]) AND
DATEADD(dd, 1, [ETab].[SDT]) AND [Iteration] IS NULL)
AND ETab.Source = 'OP'
) SELECT *
INTO __Tmp
FROM cte;
GO
To get all the records that match the criteria (laying within the time window and abiding by custom where clauses). Then I Update the [UTab].[Enum] and [UTab].[Iteration] via another generated query
-- Apply the best (Idx = 1) candidate to each matched UTab row.
-- Fix: the original wrote "UPDATE [UTab]" while aliasing the table as "up"
-- in the FROM clause. In T-SQL that makes the UPDATE target a *second*,
-- unjoined instance of UTab, so every row would be updated from an arbitrary
-- __Tmp row. Updating through the alias ties the target to the join.
-- (The commented-out debug SELECT was removed.)
UPDATE up
SET [ENum] = [__Tmp].[ENum],
    [Iteration] = N'Iteration#1'
FROM [UTab] AS up
INNER JOIN [__Tmp]
    ON [up].[MRN] = [__Tmp].[Link]
    AND [up].[SIDate] = [__Tmp].[DT]
    AND [up].[LSPEC] = [__Tmp].[LSPEC]
WHERE [__Tmp].[Idx] = 1;
This seems to work okay, but some questions:
Q. Is there anything clearly awry with the method/SQL I am using?
Thanks for your time.
The CTE usage was a bit strange to me, since you're not really doing much with it in the subsequent query. I'd move that to the UPDATE.
The query doesn't really join on the Source. I'm not sure if it's meant to do that or not. If UTab has multiple sources for MRN/LSPEC combination, that might result in an issue.
So, I come up with something like:
-- Fix: T-SQL local variables must be prefixed with @, not # (a leading #
-- denotes a temporary table), so the original DECLARE statements would not
-- parse. The #UTab/#ETab references below are genuine temp-table copies of
-- the sample tables and keep their # prefix.
DECLARE @ADT_Adjustment INT = 0;
DECLARE @SDT_Adjustment INT = 1;
DECLARE @Iteration INT = 1;

-- Rank every candidate ETab row per (MRN, LSPEC, Source, SIDate) by how
-- close ADT is to SIDate; Ordinal = 1 is the closest match.
WITH SequencedJoin AS (
SELECT
ETab.MRN, ETab.LSPEC, ETab.ADT, ETab.SDT, UTab.SIDate, ETab.Enum, ETab.[Source], UTab.Enum AS WriteEnum, UTab.Iteration AS WriteIteration
, DENSE_RANK() OVER (
PARTITION BY UTab.MRN, UTab.LSPEC, UTab.[Source], UTab.SIDate
ORDER BY ABS( DATEDIFF( MINUTE, UTab.SIDate, ETab.ADT ) )
) AS Ordinal
FROM
#UTab AS UTab
JOIN #ETab AS ETab ON (
ETab.MRN = UTab.MRN
AND ETab.LSPEC = UTab.LSPEC
AND ETab.[Source] = UTab.[Source]
AND UTab.SIDate BETWEEN DATEADD( dd, -@ADT_Adjustment, ETab.ADT ) AND DATEADD( dd, @SDT_Adjustment, ETab.SDT )
)
WHERE
UTab.Iteration IS NULL
)
-- Updating through the CTE writes to the underlying #UTab columns.
UPDATE
SequencedJoin
SET
WriteEnum = SequencedJoin.Enum
, WriteIteration = N'Iteration#' + CAST( @Iteration AS VARCHAR( 2 ) )
WHERE
SequencedJoin.[Source] = 'OP'
AND SequencedJoin.Ordinal = 1
up.L != cte.L since you are looking for anything conforming your conditions and filtering by rn.
-- Fragment from the linked sqlfiddle demo; it uses abbreviated column names
-- (M, E, L, DTE, F, S) and refers to a cte that is defined outside this
-- excerpt. The last line shows the join condition the answer suggests
-- disabling.
SELECT cte.E, [Iteration] = N'00-00-00-CA', *
FROM [Up]
INNER JOIN cte ON [Up].[M] = [cte].[M] AND [cte].[rn] = 1
WHERE [cte].[E] IS NOT NULL AND (
[Up].[DTE] BETWEEN
DATEADD(dd, -0, [cte].[ADT]) AND
DATEADD(dd, 0, [cte].[SDT]))
AND [Up].[F] = 'Y'
AND [Up].[S] = 'HC'
----comment this line
--AND [Up].[L] = [cte].[L]; -- <<<<<<<<<<<<<<<<
http://sqlfiddle.com/#!18/d1483/2/0
I modified some data: added ZZ which broke your query totally. Only two rows became matching.
ps
fixed insert issue with length of E column and col name listed in first insert.
Not a complete answer as such, but this index will speed up your CTE:
-- Covering index for the matching query: seeks on (MRN, SIDATE) with the
-- remaining referenced columns INCLUDEd so the lookup never touches the base
-- table. Renamed from the opaque "T1" to the conventional
-- <table>_<cols>_idx form so it is greppable.
CREATE INDEX UTab_MRN_SIDate_idx ON UTAB (
MRN,
SIDATE
)
INCLUDE
(
LSPEC,
Iteration
);

Else do nothing SQL query

I have a field, froiexported, in DB table claim3 that is either set to one or zero. I want to run an update where if the criteria in the case statement is met the value in froiexported is set to 1 else do nothing. Below will make my results incorrect every day.
-- Question's original statement, reproduced as posted.
-- NOTE(review): "froiexpoted" differs from "froiexported" used in the prose
-- above — confirm the real column name. "c1.jurst" references an alias c1
-- that is never declared in this statement, so it would fail to bind.
-- The ELSE '0' branch is the reported bug: it overwrites previously-set 1s
-- for rows outside today's date window.
update claim3
set froiexpoted =
CASE
WHEN froimaintdate >= dateadd(day,datediff(day,1,GETDATE()),0)
AND froimaintdate < dateadd(day,datediff(day,0,GETDATE()),0)
AND c1.jurst in ('AK', 'AL', 'CA', 'CO', 'FL', 'GA', 'IA', 'IN', 'KS', 'KY', 'LA', 'MA', 'ME', 'MN', 'MO', 'MS', 'NC', 'NE', 'NJ', 'PA', 'RI', 'SC', 'TN', 'TX', 'UT', 'VA', 'VT', 'WV')
THEN '1'
ELSE '0'
END
You can use a where clause instead:
-- Set the flag only where the criteria hold; rows that don't match are left
-- untouched, which is the "else do nothing" behaviour the question asks for.
-- Fix: the original referenced c1.jurst, but no alias c1 is declared in this
-- statement, so it would not bind; the bare column name is used instead.
-- NOTE(review): "froiexpoted <> 1" skips NULL rows (three-valued logic); the
-- question states the column is always 0 or 1, so that should be safe.
update claim3
set froiexpoted = 1
where froiexpoted <> 1
and froimaintdate >= dateadd(day,datediff(day,1,getdate()),0)
and froimaintdate < dateadd(day,datediff(day,0,getdate()),0)
and jurst in ('AK', 'AL', 'CA', 'CO', 'FL', 'GA', 'IA', 'IN'
, 'KS','KY', 'LA', 'MA', 'ME', 'MN', 'MO', 'MS', 'NC', 'NE'
, 'NJ', 'PA', 'RI', 'SC', 'TN', 'TX', 'UT', 'VA', 'VT', 'WV'
)
if you need to set 0s for the previous day as well:
-- Variant that also resets 0s for the previous day's window: the WHERE
-- restricts the update to yesterday's rows, and the CASE decides 1 vs 0.
-- Fix: c1.jurst replaced with the bare column — no alias c1 exists here.
update claim3
set froiexpoted = case
when jurst in ('AK', 'AL', 'CA', 'CO', 'FL', 'GA', 'IA', 'IN'
, 'KS','KY', 'LA', 'MA', 'ME', 'MN', 'MO', 'MS', 'NC', 'NE'
, 'NJ', 'PA', 'RI', 'SC', 'TN', 'TX', 'UT', 'VA', 'VT', 'WV'
)
then 1
else 0
end
where froimaintdate >= dateadd(day,datediff(day,1,getdate()),0)
and froimaintdate < dateadd(day,datediff(day,0,getdate()),0)
How about setting it to 1 if criteria are met, else set to the current value?

nested select TOP 1 statement

I am working on a system currently for Asset Management within our company.
The system allows users to sign in/out items on either a permanent or temporary basis.
I am working on some reporting on the system and have hit a stumbling block with returning the relevent data.
So I know that I need a nested SELECT TOP 1 statement within my main query, but I can't get it to work.
The main query is as follows:
-- Question's original query, reproduced as posted.
-- NOTE(review): it uses old-style comma joins with the join predicates in
-- WHERE, and selects four columns that are all named ID — the answer further
-- below reworks it with explicit ANSI joins.
SELECT [Asset-User].ID, [Asset-User].Asset_ID, [Assets].ID, [Assets].Signed_Out, [Assets].Asset_Tag_Serial_Num, [Assets].Name_Hostname, [Assets].Type_ID, [Asset_Type].ID, [Asset_Type].Name_Model, [Asset-User].User_ID, [Company_Users].ID, [Asset-User].Sign_Out_Date, [Asset-User].Return_Date, [Asset-User].[Perm_Signout?]
FROM [Asset-User], [Assets], [Asset_Type], [Company_Users]
WHERE ([User_ID] = '1') AND [Asset-User].Asset_ID = [Assets].ID AND [Assets].Type_ID = [Asset_Type].ID AND [Asset-User].User_ID = [Company_Users].ID AND [Assets].Signed_Out = '1'
So this query returns everything that is currently marked as "Signed Out" that has ever been signed out by User 1.
Somewhere in here i need to add a nested Select TOP 1 on Asset-User.ID so that it only returns items that are marked as permanent sign outs, OR if not permanent that they have most recently been signed out by User 1.
This would then only give me a list of items currently assigned to that user and not display the Asset if someone else has signed it out since
Any help would be greatly appreciated
As per request Sample data below:
https://docs.google.com/spreadsheets/d/1o4T6bsxyO-1dGE0-FUtWFboRupcq4o4V9i2Em0_BjyU/edit?usp=sharing
First sheet shows actual results, second sheet shows roughly what should be expected
As you will see here, this user has signed out a few items multiple times (and those items may not most recently have been signed out by this user)
Its hard to get the sample data but in essence there should be no duplicate Asset_ID's in the list as only the most recent ID (sign out ID this relates to) should be displayed for each Asset_ID.
Does that help?
Thanks again
As per request here is the Schema build code that will create a sample DB to work with
-- Fixture: asset type lookup (Global_ID references Global_Types below).
CREATE TABLE Asset_Type
([ID] int, [Global_ID] int, [Name_Model] varchar(30), [Description_Spec] varchar(54))
;
INSERT INTO Asset_Type
([ID], [Global_ID], [Name_Model], [Description_Spec])
VALUES
(1, 1, 'Dell Optiplex 3020', 'Windows 7 Professional, Intel Core i3 3.40GHz,4Gb RAM'),
(2, 3, 'Viewsonic VA2231wa', 'Viewsonic Widescreen Monitor'),
(3, 3, 'Samsung S24B150BL', 'Samsung LED 24" Widescreen Monitor')
;
-- Fixture: assets inventory.
-- NOTE(review): Purchase_Date and Purchase_Price are stored as varchar rather
-- than DATE/DECIMAL, and many Type_ID values (1003+) have no matching row in
-- the Asset_Type sample above — presumably trimmed sample data; confirm.
CREATE TABLE Assets
([ID] int, [Asset_Tag_Serial_Num] varchar(29), [Type_ID] int, [Purchase_Date] varchar(10), [Purchase_Price] varchar(7), [Name_Hostname] varchar(36), [Signed_Out] int)
;
INSERT INTO Assets
([ID], [Asset_Tag_Serial_Num], [Type_ID], [Purchase_Date], [Purchase_Price], [Name_Hostname], [Signed_Out])
VALUES
(1, '0206', 1, '2013-11-29', '323.30', 'WS0206', 1),
(3, '0226', 2, NULL, NULL, 'Viewsonic VA2231wa - 0226', 1),
(4, '0204', 1, '2013-11-29', '323.00', 'WS0204', 1),
(5, '0205', 1, '2013-11-29', '323.00', 'WS0205', 1),
(6, '0108', 1003, NULL, NULL, 'Small Office Sat Nav', 1),
(7, '0092', 1004, NULL, NULL, 'Large Office Sat Nav', 1),
(8, 'GWC36-DHDBC-J2MXY-H2BGY-8C79G', 1005, '1900-01-01', '0.00', 'MS Office for WS0020', 1),
(9, '0020', 1006, '1900-01-01', '0.00', 'WS0020', 1),
(10, '0173', 2, '1900-01-01', '0.00', 'Viewsonic VA2231wa - 0173', 1),
(11, '0172', 1007, '1900-01-01', '0.00', 'Dell 19" Monitor 0172', 1),
(12, '00104926EC6B', 1008, '1900-01-01', '0.00', 'Shortel 230 - EC6B', 1),
(13, '0227', 1009, NULL, NULL, 'Blue - Yeti Mic', 0),
(14, '0221', 1, NULL, NULL, 'WS0221', 1),
(15, '0222', 1, '2013-11-29', '323.00', 'WS0222', 1),
(16, '0223', 1, NULL, NULL, 'WS0223', 1),
(17, '0220', 1, '2013-11-29', '323.00', 'WS0220', 1),
(18, '0217', 1, '2013-11-29', '323.00', 'WS0217', 1),
(19, '0218', 1, NULL, NULL, 'WS0218', 1),
(20, '0219', 1, '2013-11-29', '323.00', 'WS0219', 1),
(21, '0228', 2, NULL, NULL, 'Viewsonic VA2231wa - 0228', 1),
(22, '0229', 1010, NULL, NULL, 'Dell 19" Monitor 0229', 1),
(23, '00104931AA16', 1011, NULL, NULL, 'Shortel 115 - AA16', 1),
(24, '0093 - DYTJ18X4DJ8T', 1012, NULL, NULL, 'Office IPad 3', 1),
(25, '0095', 1013, '1900-01-01', '0.00', '320Gb External HDD', 1),
(26, '0071', 1014, NULL, NULL, '0071 - NEC Projector', 0),
(27, '0072', 1015, NULL, NULL, '0072 - Black Dell Projector', 0),
(28, '0073', 1016, '1900-01-01', '0.00', '0073 - Dell Projector', 0),
(29, '0230', 1017, '1900-01-01', '0.00', '0230 - Silver Dell Projector', 0),
(30, '0064', 1018, NULL, NULL, 'WS0064', 0),
(31, '0231', 1019, NULL, NULL, 'Freecom 1GB Pen - 0231', 1),
(47, '0165', 2, NULL, NULL, 'Viewsonic VA2231wa - 0165', 1),
(48, '0232', 1010, '1900-01-01', '0.00', 'Dell 19" Monitor 0232', 1),
(49, '0233', 1010, '1900-01-01', '0.00', 'Dell 19" Monitor 0233', 1),
(50, '0137', 1022, NULL, NULL, 'Viewsonic VA2248-LED - 0137', 1),
(51, '0234', 1010, '1900-01-01', '0.00', 'Dell 19" Monitor 0234', 1),
(52, '0235', 1010, '1900-01-01', '0.00', 'Dell 19" Monitor 0235', 1),
(53, '0134', 1010, NULL, NULL, 'Dell 19" Monitor 0134', 0),
(54, '0135', 1022, NULL, NULL, 'Viewsonic VA2248-LED - 0135', 1),
(55, '0236', 3, '1900-01-01', '0.00', 'Samsung S24B150BL - 0236', 1),
(56, '001049201D9A', 1008, '1900-01-01', '0.00', 'Shortel 230 - 1D9A', 1),
(57, '0010492015AE', 1008, '1900-01-01', '0.00', 'Shortel 230 - 15AE', 1),
(93, '0269', 1029, '1900-01-01', '0.00', 'TP-Link Switch - 0269', 0),
(94, '0058', 1030, NULL, NULL, 'WS0058', 1),
(95, '0270', 1031, NULL, NULL, 'MeetingRoom3', 1),
(96, '0243', 1032, NULL, NULL, 'MeetingRoom2', 1),
(97, '0271', 1027, NULL, NULL, 'Dynamode SW80010-D Switch - 0271', 0),
(123, '0281', 1045, '2014-07-18', '104.50', 'Philips 23.6" Monitor 0281', 0),
(124, '0282', 1045, '2014-07-18', '104.50', 'Philips 23.6" Monitor 0282', 1),
(125, '0283', 1045, '2014-07-18', '104.50', 'Philips 23.6" Monitor 0283', 0),
(126, '0284', 1045, '2014-07-18', '104.50', 'Philips 23.6" Monitor 0284', 1),
(127, '0285', 1045, '2014-07-18', '104.50', 'Philips 23.6" Monitor 0285', 1),
(128, '0286', 1045, '2014-07-18', '104.50', 'Philips 23.6" Monitor 0286', 1),
(129, '0287', 1045, '2014-07-18', '104.50', 'Philips 23.6" Monitor 0287', 1),
(143, '0280', 1, '2014-07-03', '403.80', 'WS0280', 1),
(144, '0296', 1, '2014-07-03', '403.80', 'WS0296', 1),
(145, '0297', 1, '2014-07-03', '403.80', 'WS0297', 1),
(146, '0298', 1, '2014-07-03', '403.80', 'WS0298', 1),
(147, '0299', 1, '2014-07-03', '403.80', 'WS0299', 1),
(148, '0052', 1036, '1900-01-01', '0.00', 'WS0052', 1),
(168, '0312', 1047, NULL, NULL, 'Epson White HD - 0312', 1),
(169, '0201', 1049, '1900-01-01', '0.00', 'Ipad 4 - 0201', 0),
(170, 'HP27J-2C496-83KXB-RGMX6-8QJQG', 1020, '1900-01-01', '0.00', 'MS Office for Jonny D', 1),
(171, '7N4QY-DFGWD-P6662-CFCHG-QYFP2', 1021, '1900-01-01', '0.00', 'MS Office for WS0215 (Simeon Laptop)', 1),
(172, '0140', 1022, '1900-01-01', '0.00', 'Viewsonic VA2248-LED - 0140', 1),
(198, '0109', 1037, '1900-01-01', '0.00', 'WS0109', 1),
(199, '0324', 1052, '1900-01-01', '0.00', 'Philips 23.6" Monitor 0324', 1)
;
-- Fixture: sign-out history linking assets to users.
-- NOTE(review): the column is declared [Perm_Signout] here, but the queries
-- in this thread reference [Perm_Signout?] (with a trailing '?') — confirm
-- the real column name; against this sample schema those queries would fail.
CREATE TABLE [Asset-User]
([ID] int, [Asset_ID] int, [User_ID] int, [Sign_Out_Date] datetime, [Return_Date] datetime, [Perm_Signout] int)
;
INSERT INTO [Asset-User]
([ID], [Asset_ID], [User_ID], [Sign_Out_Date], [Return_Date], [Perm_Signout])
VALUES
(2, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(3, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(4, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(5, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(6, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(7, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(8, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(9, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(10, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(11, 1, 1, '2014-03-29 00:00:00', '2014-03-29 00:00:00', 0),
(1002, 1, 1, '2014-04-01 00:00:00', '2014-04-01 00:00:00', 0),
(1003, 1, 1, '2014-01-01 00:00:00', '2014-01-01 00:00:00', 1)
;
-- Fixture: single sample user (User_ID 1 used throughout the thread).
-- NOTE(review): the '#' in the email value is presumably Stack Overflow's
-- anti-spam mangling of '@'; it is left as posted since it is data.
CREATE TABLE Company_Users
([ID] int, [Name] varchar(14), [Domain_Username] varchar(14), [Dept] varchar(16), [Email] varchar(25), [DD_Ext] int, [Job_Title] varchar(30), [Deleted_Left] int)
;
INSERT INTO Company_Users
([ID], [Name], [Domain_Username], [Dept], [Email], [DD_Ext], [Job_Title], [Deleted_Left])
VALUES
(1, 'Neil Smithson', 'Neil.Smithson', '2nd Line Support', 'neil.smithson#dezrez.com', 3041, 'Second Line Support Technician', 0)
;
-- Fixture: global asset category lookup (referenced by Asset_Type.Global_ID).
CREATE TABLE Global_Types
([ID] int, [Name] varchar(13), [Description] varchar(54))
;
INSERT INTO Global_Types
([ID], [Name], [Description])
VALUES
(1, 'PC', 'Desktop PC')
;
This is not really an answer but will show you how you could rework this query using aliases and the (not really) newer join style.
-- The question's query reworked with explicit ANSI joins and table aliases.
-- Improvements over the posted version: the four columns that were all named
-- "ID" are given distinct aliases so the result set is unambiguous, the alias
-- "at" is avoided (AT is reserved in some T-SQL contexts), and the statement
-- is terminated.
SELECT au.ID AS Asset_User_ID
, au.Asset_ID
, a.ID AS Asset_Row_ID
, a.Signed_Out
, a.Asset_Tag_Serial_Num
, a.Name_Hostname
, a.Type_ID
, aty.ID AS Asset_Type_ID
, aty.Name_Model
, au.User_ID
, cu.ID AS Company_User_ID
, au.Sign_Out_Date
, au.Return_Date
, au.[Perm_Signout?] -- NOTE(review): sample DDL declares [Perm_Signout] (no '?'); confirm actual column name
FROM [Asset-User] au
join Assets a on au.Asset_ID = a.ID
join Asset_Type aty on a.Type_ID = aty.ID
join Company_Users cu on au.User_ID = cu.ID
WHERE au.User_ID = '1'
AND a.Signed_Out = '1';